From db8880fa950ebf8d690c33ebc614e871cf15dc94 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:57:15 +0100 Subject: [PATCH 001/201] drafted requirements --- services/director/requirements/Makefile | 7 - services/director/requirements/_base.in | 77 ++----- services/director/requirements/_base.txt | 152 ------------- services/director/requirements/_test.in | 31 +-- services/director/requirements/_test.txt | 251 ---------------------- services/director/requirements/_tools.in | 6 +- services/director/requirements/_tools.txt | 45 ---- services/director/requirements/ci.txt | 10 +- services/director/requirements/dev.txt | 7 + services/director/requirements/prod.txt | 7 +- 10 files changed, 49 insertions(+), 544 deletions(-) delete mode 100644 services/director/requirements/_base.txt delete mode 100644 services/director/requirements/_test.txt delete mode 100644 services/director/requirements/_tools.txt diff --git a/services/director/requirements/Makefile b/services/director/requirements/Makefile index 7aacec9e5ee..3f25442b790 100644 --- a/services/director/requirements/Makefile +++ b/services/director/requirements/Makefile @@ -4,10 +4,3 @@ include ../../../requirements/base.Makefile # Add here any extra explicit dependency: e.g. _migration.txt: _base.txt - - -_test.txt: _base.txt _test.in - ## NOTE: this recipe override has to be removed - ## to execute target upgrades e.g. due to vulnerability of - ## a library. - @echo INFO: test.txt is frozen. Skipping upgrade. 
diff --git a/services/director/requirements/_base.in b/services/director/requirements/_base.in index 0618d6c7759..2c0510eb962 100644 --- a/services/director/requirements/_base.in +++ b/services/director/requirements/_base.in @@ -1,70 +1,19 @@ # -# Specifies third-party dependencies for 'director' +# Specifies third-party dependencies for 'services/director' +# +--constraint ../../../requirements/constraints.txt -# IMPORTANT: All requirements (including the packages in this repository) as FROZEN to those in itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# - current service is going to be replaced by director-v2 -# -# +# intra-repo required dependencies +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in -# This list was obtained as follows -# -# $ docker pull itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0: Pulling from itisfoundation/director -# Digest: sha256:84ba999ca348bf9d56d9ef0af2e3494ede0cd06d357d289e2a09a4191e7a56d3 -# Status: Image is up to date for itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# docker.io/itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# -# $ docker inspect itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0| jq '.[0] | .RepoTags, .ContainerConfig.Labels' -# [ -# "itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0" -# ] -# { -# "io.osparc.api-version": "0.1.0", -# "maintainer": "sanderegg", -# "org.label-schema.build-date": "2020-11-05T14:02:31Z", -# "org.label-schema.schema-version": "1.0", -# "org.label-schema.vcs-ref": "c8669fb", -# "org.label-schema.vcs-url": "https://github.com/ITISFoundation/osparc-simcore.git" -# } -# -# $
docker run -it itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 pip freeze -# +# service-library[fastapi] +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_fastapi.in -aiodebug==1.1.2 -aiodocker==0.14.0 -aiohttp==3.3.2 -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df -aiopg==1.0.0 -aiozipkin==0.7.1 -async-generator==1.10 -async-timeout==3.0.1 -asyncio-extras==1.3.2 -attrs==20.2.0 -certifi==2019.3.9 -chardet==3.0.4 -dataclasses==0.7 -idna==2.8 -idna-ssl==1.1.0 -isodate==0.6.0 -jsonschema==2.6.0 -lazy-object-proxy==1.4.3 -multidict==4.5.2 -openapi-core==0.12.0 -openapi-spec-validator==0.2.9 -prometheus-client==0.8.0 -psycopg2-binary==2.8.6 -pydantic==1.7.2 -PyYAML==5.4 # CVE-2020-1747 -requests==2.27.1 # -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library -six==1.12.0 -SQLAlchemy==1.3.20 -strict-rfc3339==0.7 -tenacity==6.0.0 -trafaret==2.1.0 -ujson==4.0.1 -urllib3==1.26.5 # CVE-2021-33503 -Werkzeug==1.0.1 -yarl==1.3.0 +aiodocker +fastapi[all] +httpx +prometheus-client +pydantic diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt deleted file mode 100644 index 2c6e016526f..00000000000 --- a/services/director/requirements/_base.txt +++ /dev/null @@ -1,152 +0,0 @@ -aiodebug==1.1.2 - # via - # -r requirements/_base.in - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.in -aiohttp==3.3.2 - # via - # -r requirements/_base.in - # aiodocker - # aiohttp-apiset - # aiozipkin - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.in -aiopg==1.0.0 - # via - # -r 
requirements/_base.in - # simcore-service-library -aiozipkin==0.7.1 - # via - # -r requirements/_base.in - # simcore-service-library -async-generator==1.10 - # via - # -r requirements/_base.in - # asyncio-extras -async-timeout==3.0.1 - # via - # -r requirements/_base.in - # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.in -attrs==20.2.0 - # via - # -r requirements/_base.in - # aiohttp - # openapi-core - # simcore-service-library -certifi==2019.3.9 - # via - # -r requirements/_base.in - # requests -chardet==3.0.4 - # via - # -r requirements/_base.in - # aiohttp -charset-normalizer==2.0.12 - # via requests -dataclasses==0.7 - # via -r requirements/_base.in -idna==2.8 - # via - # -r requirements/_base.in - # idna-ssl - # requests - # yarl -idna-ssl==1.1.0 - # via - # -r requirements/_base.in - # aiohttp -isodate==0.6.0 - # via - # -r requirements/_base.in - # openapi-core -jsonschema==2.6.0 - # via - # -r requirements/_base.in - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 - # via - # -r requirements/_base.in - # openapi-core - # simcore-service-library -multidict==4.5.2 - # via - # -r requirements/_base.in - # aiohttp - # yarl -openapi-core==0.12.0 - # via - # -r requirements/_base.in - # simcore-service-library -openapi-spec-validator==0.2.9 - # via - # -r requirements/_base.in - # openapi-core -prometheus-client==0.8.0 - # via - # -r requirements/_base.in - # simcore-service-library -psycopg2-binary==2.8.6 - # via - # -r requirements/_base.in - # aiopg - # simcore-service-library -pydantic==1.7.2 - # via - # -r requirements/_base.in - # simcore-service-library -pyyaml==5.4 - # via - # -r requirements/_base.in - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -requests==2.27.1 - # via -r requirements/_base.in -simcore-service-library @ 
git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library - # via -r requirements/_base.in -six==1.12.0 - # via - # -r requirements/_base.in - # isodate - # openapi-core - # openapi-spec-validator - # tenacity -sqlalchemy==1.3.20 - # via - # -r requirements/_base.in - # simcore-service-library -strict-rfc3339==0.7 - # via - # -r requirements/_base.in - # openapi-core -tenacity==6.0.0 - # via - # -r requirements/_base.in - # simcore-service-library -trafaret==2.1.0 - # via - # -r requirements/_base.in - # simcore-service-library -ujson==4.0.1 - # via - # -r requirements/_base.in - # simcore-service-library -urllib3==1.26.5 - # via - # -r requirements/_base.in - # requests -werkzeug==1.0.1 - # via - # -r requirements/_base.in - # simcore-service-library -yarl==1.3.0 - # via - # -r requirements/_base.in - # aiodocker - # aiohttp diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index d480d049a73..eafeb199342 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -1,32 +1,23 @@ +# Specifies dependencies required to run 'services/director/tests' +# both for unit and integration tests!! # -# Specifies dependencies required to run 'director' -# - -# frozen specs ---requirement _base.txt -# NOTE: -# FROZEN (see notes in _base.in) -# DO NOT CHANGE ANYTHING HERE. -# IT WON'T HAVE ANY EFFECT
+# - Resulting _test.txt is a frozen list of EXTRA packages for testing, besides _base.txt # - -# FROZEN as well (DO NOT CHANGE anything in pytest-simcore, it will have no effect in the director package) -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#egg=pytest-simcore&subdirectory=packages/pytest-simcore - +--constraint _base.txt # testing -aioresponses -coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 docker -openapi-spec-validator~=0.2 # TODO: this library is limiting jsonschema<3 -ptvsd -pylint +faker +jsonref pytest -pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 +pytest-asyncio pytest-cov +pytest-docker pytest-instafail pytest-mock pytest-runner pytest-sugar -python-dotenv +respx diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt deleted file mode 100644 index 97f4c9313fa..00000000000 --- a/services/director/requirements/_test.txt +++ /dev/null @@ -1,251 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_test.txt --strip-extras requirements/_test.in -# -aiodebug==1.1.2 - # via - # -r requirements/_base.txt - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.txt -aiohttp==3.3.2 - # via - # -r requirements/_base.txt - # aiodocker - # aiohttp-apiset - # aioresponses - # aiozipkin - # pytest-aiohttp - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.txt -aiopg==1.0.0 - # via - # -r requirements/_base.txt - # simcore-service-library -aioresponses==0.7.2 - # via -r requirements/_test.in -aiozipkin==0.7.1 - # via - # -r requirements/_base.txt - # simcore-service-library -astroid==2.4.2 - # via pylint
-async-generator==1.10 - # via - # -r requirements/_base.txt - # asyncio-extras -async-timeout==3.0.1 - # via - # -r requirements/_base.txt - # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.txt -attrs==20.2.0 - # via - # -r requirements/_base.txt - # aiohttp - # openapi-core - # pytest - # simcore-service-library -certifi==2019.3.9 - # via - # -r requirements/_base.txt - # requests -chardet==3.0.4 - # via - # -r requirements/_base.txt - # aiohttp -charset-normalizer==2.0.12 - # via - # -r requirements/_base.txt - # requests -coverage==4.5.1 - # via - # -r requirements/_test.in - # coveralls - # pytest-cov -dataclasses==0.7 - # via - # -r requirements/_base.txt - # pydantic -docker==4.3.1 - # via -r requirements/_test.in -docopt==0.6.2 - # via coveralls -idna==2.8 - # via - # -r requirements/_base.txt - # idna-ssl - # requests - # yarl -idna-ssl==1.1.0 - # via - # -r requirements/_base.txt - # aiohttp -importlib-metadata==2.0.0 - # via - # pluggy - # pytest -iniconfig==1.1.1 - # via pytest -isodate==0.6.0 - # via - # -r requirements/_base.txt - # openapi-core -isort==5.6.4 - # via pylint -jsonschema==2.6.0 - # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 - # via - # -r requirements/_base.txt - # astroid - # openapi-core - # simcore-service-library -mccabe==0.6.1 - # via pylint -multidict==4.5.2 - # via - # -r requirements/_base.txt - # aiohttp - # yarl -openapi-core==0.12.0 - # via - # -r requirements/_base.txt - # simcore-service-library -openapi-spec-validator==0.2.9 - # via - # -r requirements/_base.txt - # -r requirements/_test.in - # openapi-core -packaging==20.4 - # via - # pytest - # pytest-sugar -pluggy==0.13.1 - # via pytest -prometheus-client==0.8.0 - # via - # -r requirements/_base.txt - # simcore-service-library -psycopg2-binary==2.8.6 - # via - # -r requirements/_base.txt - # aiopg - # simcore-service-library -ptvsd==4.3.2 - # via -r 
requirements/_test.in -py==1.9.0 - # via pytest -pydantic==1.7.2 - # via - # -r requirements/_base.txt - # simcore-service-library -pylint==2.6.0 - # via -r requirements/_test.in -pyparsing==2.4.7 - # via packaging -pytest==6.1.2 - # via - # -r requirements/_test.in - # pytest-aiohttp - # pytest-cov - # pytest-instafail - # pytest-mock - # pytest-simcore - # pytest-sugar -pytest-aiohttp==0.3.0 - # via -r requirements/_test.in -pytest-cov==2.10.1 - # via -r requirements/_test.in -pytest-instafail==0.4.2 - # via -r requirements/_test.in -pytest-mock==3.3.1 - # via -r requirements/_test.in -pytest-runner==5.2 - # via -r requirements/_test.in -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#subdirectory=packages/pytest-simcore - # via -r requirements/_test.in -pytest-sugar==0.9.4 - # via -r requirements/_test.in -python-dotenv==0.15.0 - # via -r requirements/_test.in -pyyaml==5.4 - # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -requests==2.27.1 - # via - # -r requirements/_base.txt - # codecov - # coveralls - # docker -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#subdirectory=packages/service-library - # via -r requirements/_base.txt -six==1.12.0 - # via - # -r requirements/_base.txt - # astroid - # docker - # isodate - # openapi-core - # openapi-spec-validator - # packaging - # tenacity - # websocket-client -sqlalchemy==1.3.20 - # via - # -r requirements/_base.txt - # simcore-service-library -strict-rfc3339==0.7 - # via - # -r requirements/_base.txt - # openapi-core -tenacity==6.0.0 - # via - # -r requirements/_base.txt - # simcore-service-library -termcolor==1.1.0 - # via pytest-sugar -toml==0.10.2 - # via - # pylint - # pytest -trafaret==2.1.0 - # via - # -r requirements/_base.txt - # simcore-service-library -typed-ast==1.4.1 - # via astroid -ujson==4.0.1 - # 
via - # -r requirements/_base.txt - # simcore-service-library -urllib3==1.26.5 - # via - # -r requirements/_base.txt - # requests -websocket-client==0.57.0 - # via docker -werkzeug==1.0.1 - # via - # -r requirements/_base.txt - # simcore-service-library -wrapt==1.12.1 - # via astroid -yarl==1.3.0 - # via - # -r requirements/_base.txt - # aiodocker - # aiohttp -zipp==3.4.0 - # via importlib-metadata diff --git a/services/director/requirements/_tools.in b/services/director/requirements/_tools.in index 05f1ab1646f..52a9a39d162 100644 --- a/services/director/requirements/_tools.in +++ b/services/director/requirements/_tools.in @@ -1,7 +1,7 @@ +--constraint ../../../requirements/constraints.txt --constraint _base.txt --constraint _test.txt +--requirement ../../../requirements/devenv.txt + watchdog[watchmedo] -black~=20.8b0 -pip-tools -bump2version diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt deleted file mode 100644 index 821e63f1a10..00000000000 --- a/services/director/requirements/_tools.txt +++ /dev/null @@ -1,45 +0,0 @@ -appdirs==1.4.4 - # via black -black==20.8b1 - # via -r requirements/_tools.in -bump2version==1.0.1 - # via -r requirements/_tools.in -click==8.0.3 - # via - # black - # pip-tools -mypy-extensions==0.4.3 - # via black -pathspec==0.9.0 - # via black -pep517==0.12.0 - # via pip-tools -pip==24.2 - # via pip-tools -pip-tools==6.4.0 - # via -r requirements/_tools.in -pyyaml==5.4 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # watchdog -regex==2022.1.18 - # via black -setuptools==75.2.0 - # via pip-tools -toml==0.10.2 - # via - # -c requirements/_test.txt - # black -tomli==1.2.3 - # via pep517 -typed-ast==1.4.1 - # via - # -c requirements/_test.txt - # black -typing-extensions==4.0.1 - # via black -watchdog==2.1.6 - # via -r requirements/_tools.in -wheel==0.37.1 - # via pip-tools diff --git a/services/director/requirements/ci.txt b/services/director/requirements/ci.txt index 
8edcd5f2bfe..e805fec3802 100644 --- a/services/director/requirements/ci.txt +++ b/services/director/requirements/ci.txt @@ -7,7 +7,15 @@ # # installs base + tests requirements +--requirement _base.txt --requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +pytest-simcore @ ../../packages/pytest-simcore/ +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ # installs current package -. +simcore-service-director @ . diff --git a/services/director/requirements/dev.txt b/services/director/requirements/dev.txt index dac3f0a494b..f278b7206fd 100644 --- a/services/director/requirements/dev.txt +++ b/services/director/requirements/dev.txt @@ -12,5 +12,12 @@ --requirement _test.txt --requirement _tools.txt + +# installs this repo's packages +--editable ../../packages/models-library +--editable ../../packages/pytest-simcore/ +--editable ../../packages/service-library[fastapi] +--editable ../../packages/settings-library/ + # installs current package --editable . diff --git a/services/director/requirements/prod.txt b/services/director/requirements/prod.txt index dc0ec561efe..8a8b1d29125 100644 --- a/services/director/requirements/prod.txt +++ b/services/director/requirements/prod.txt @@ -9,5 +9,10 @@ # installs base requirements --requirement _base.txt +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ + # installs current package -. +simcore-service-director @ . 
From fd64036a240d6387d443d9c4a6a39f14b2571403 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:01:53 +0100 Subject: [PATCH 002/201] upgraded --- services/director/requirements/_base.txt | 379 ++++++++++++++++++++++ services/director/requirements/_test.txt | 110 +++++++ services/director/requirements/_tools.txt | 86 +++++ 3 files changed, 575 insertions(+) create mode 100644 services/director/requirements/_base.txt create mode 100644 services/director/requirements/_test.txt create mode 100644 services/director/requirements/_tools.txt diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt new file mode 100644 index 00000000000..ea607f5efec --- /dev/null +++ b/services/director/requirements/_base.txt @@ -0,0 +1,379 @@ +aio-pika==9.4.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.23.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_base.in +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.10.10 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.1 + # via aiohttp +anyio==4.6.2.post1 + # via + # fast-depends + # faststream + # httpx + # starlette + # watchfiles +arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==24.2.0 + # via + # aiohttp + # jsonschema + # referencing +certifi==2024.8.30 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.0 + # via requests +click==8.1.7 + # via + # typer + # uvicorn +deprecated==1.2.14 + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-semantic-conventions +dnspython==2.7.0 + # via email-validator +email-validator==2.2.0 + # via + # fastapi + # pydantic +fast-depends==2.4.12 + # via faststream +fastapi==0.99.1 + # via + # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator +faststream==0.5.28 + # via -r requirements/../../../packages/service-library/requirements/_base.in +frozenlist==1.5.0 + # via + # aiohttp + # aiosignal +googleapis-common-protos==1.65.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.67.1 + # via opentelemetry-exporter-otlp-proto-grpc +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.6 + # via httpx +httptools==0.6.4 + # via uvicorn +httpx==0.27.2 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r 
requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # fastapi +idna==3.10 + # via + # anyio + # email-validator + # httpx + # requests + # yarl +importlib-metadata==8.4.0 + # via opentelemetry-api +itsdangerous==2.2.0 + # via fastapi +jinja2==3.1.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2023.7.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +multidict==6.1.0 + # via + # aiohttp + # yarl +opentelemetry-api==1.27.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.27.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in 
+opentelemetry-exporter-otlp-proto-common==1.27.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.27.0 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.27.0 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-asgi==0.48b0 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-fastapi==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-requests==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.27.0 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.27.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests +orjson==3.10.11 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi +pamqp==3.3.0 + # via aiormq +prometheus-client==0.21.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator +prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +propcache==0.2.0 + # via yarl +protobuf==4.25.5 + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pydantic==1.10.18 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/_base.in + # fast-depends + # fastapi +pygments==2.18.0 + # via rich +pyinstrument==5.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +python-dateutil==2.9.0.post0 + # via arrow +python-dotenv==1.0.1 + # via uvicorn +python-multipart==0.0.17 + # via fastapi +pyyaml==6.0.2 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # fastapi + # uvicorn +redis==5.2.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +referencing==0.29.3 + # via + # -c requirements/../../../packages/service-library/requirements/./constraints.txt + # jsonschema + # jsonschema-specifications +repro-zipfile==0.3.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +requests==2.32.3 + # via opentelemetry-exporter-otlp-proto-http +rich==13.9.4 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.20.1 + # via + # jsonschema + # referencing +setuptools==75.3.0 + # via opentelemetry-instrumentation +shellingham==1.5.4 + # via typer +six==1.16.0 + # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # httpx +starlette==0.27.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +tenacity==9.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +toolz==1.0.0 + # via -r 
requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.66.6 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.12.5 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20241003 + # via arrow +typing-extensions==4.12.2 + # via + # aiodebug + # fastapi + # faststream + # opentelemetry-sdk + # pydantic + # typer +ujson==5.10.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +urllib3==2.2.3 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # requests 
+uvicorn==0.32.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # fastapi +uvloop==0.21.0 + # via uvicorn +watchfiles==0.24.0 + # via uvicorn +websockets==13.1 + # via uvicorn +wrapt==1.16.0 + # via + # deprecated + # opentelemetry-instrumentation +yarl==1.17.1 + # via + # aio-pika + # aiohttp + # aiormq +zipp==3.20.2 + # via importlib-metadata diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt new file mode 100644 index 00000000000..656c294334f --- /dev/null +++ b/services/director/requirements/_test.txt @@ -0,0 +1,110 @@ +anyio==4.6.2.post1 + # via + # -c requirements/_base.txt + # httpx +attrs==24.2.0 + # via + # -c requirements/_base.txt + # pytest-docker +certifi==2024.8.30 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.0 + # via + # -c requirements/_base.txt + # requests +coverage==7.6.4 + # via pytest-cov +docker==7.1.0 + # via -r requirements/_test.in +faker==30.8.2 + # via -r requirements/_test.in +h11==0.14.0 + # via + # -c requirements/_base.txt + # httpcore +httpcore==1.0.6 + # via + # -c requirements/_base.txt + # httpx +httpx==0.27.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # respx +idna==3.10 + # via + # -c requirements/_base.txt + # anyio + # httpx + # requests +iniconfig==2.0.0 + # via pytest +jsonref==1.1.0 + # via -r requirements/_test.in +packaging==24.1 + # via + # pytest + # pytest-sugar +pluggy==1.5.0 + # via pytest +pytest==8.3.3 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-cov + # pytest-docker + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest-cov==6.0.0 + # via -r requirements/_test.in +pytest-docker==3.1.1 + # via -r requirements/_test.in 
+pytest-instafail==0.5.0 + # via -r requirements/_test.in +pytest-mock==3.14.0 + # via -r requirements/_test.in +pytest-runner==6.0.1 + # via -r requirements/_test.in +pytest-sugar==1.0.0 + # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via + # -c requirements/_base.txt + # faker +requests==2.32.3 + # via + # -c requirements/_base.txt + # docker +respx==0.21.1 + # via -r requirements/_test.in +six==1.16.0 + # via + # -c requirements/_base.txt + # python-dateutil +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio + # httpx +termcolor==2.5.0 + # via pytest-sugar +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # faker +urllib3==2.2.3 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker + # requests diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt new file mode 100644 index 00000000000..3c83af3ad3e --- /dev/null +++ b/services/director/requirements/_tools.txt @@ -0,0 +1,86 @@ +astroid==3.3.5 + # via pylint +black==24.10.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # -c requirements/_base.txt + # black + # pip-tools +dill==0.3.9 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.16.1 + # via virtualenv +identify==2.6.1 + # via pre-commit +isort==5.13.2 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.13.0 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.0.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==24.1 + # via + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==24.3.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt 
+platformdirs==4.3.6 + # via + # black + # pylint + # virtualenv +pre-commit==4.0.1 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.1 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pre-commit + # watchdog +ruff==0.7.2 + # via -r requirements/../../../requirements/devenv.txt +setuptools==75.3.0 + # via + # -c requirements/_base.txt + # pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.27.1 + # via pre-commit +watchdog==6.0.0 + # via -r requirements/_tools.in +wheel==0.44.0 + # via pip-tools From 7f0829351cc7acf25fb2b7d09817f115ae45aad2 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:04:48 +0100 Subject: [PATCH 003/201] rm --- services/director/codegen.sh | 157 --------------------- services/director/temp_generate_openapi.sh | 37 ----- 2 files changed, 194 deletions(-) delete mode 100755 services/director/codegen.sh delete mode 100755 services/director/temp_generate_openapi.sh diff --git a/services/director/codegen.sh b/services/director/codegen.sh deleted file mode 100755 index bd5b6600cf6..00000000000 --- a/services/director/codegen.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash -# define the input specification file and the output directory -# typical structure: -# /src/package-name/.openapi/v1/package_api.yaml -- this is the input file -# /src/package-name/rest/generated_code -- this is the output directory -SOURCE_DIR=./src/simcore_service_director -API_VERSION=v0 -INPUT_SPEC=${SOURCE_DIR}/api/${API_VERSION}/openapi.yaml -OUTPUT_DIR=${SOURCE_DIR}/rest -OUTPUT_DIR_GEN=${SOURCE_DIR}/rest/generated_code -INIT_FILE_PATH=${OUTPUT_DIR}/__init__.py 
-HANDLERS_FILE_PATH=${OUTPUT_DIR}/handlers.py -ROUTING_FILE_PATH=${OUTPUT_DIR_GEN}/routing.py - -# create the folder for the output -mkdir -p $OUTPUT_DIR -# generate the python server models code -ABSOLUTE_INPUT_PATH=$(realpath "${INPUT_SPEC}") -ABSOLUTE_OUTPUT_DIR=$(realpath "${OUTPUT_DIR}") -ABSOLUTE_OUTPUT_DIR_GEN=$(realpath "${OUTPUT_DIR_GEN}") -../../scripts/openapi/openapi_python_server_codegen.sh -i ${ABSOLUTE_INPUT_PATH} -o ${ABSOLUTE_OUTPUT_DIR_GEN} -# replace import entries in python code -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server.models././g' {} \; -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server/../g' {} \; -find ${OUTPUT_DIR_GEN} -maxdepth 1 -type f -exec sed -i 's/openapi_server/./g' {} \; -# create __init__.py if always -cat > "${INIT_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. -""" -from .generated_code import ( - models, - util, - routing -) -EOF - -# only generate stub if necessary -if [ ! -e "${HANDLERS_FILE_PATH}" ]; then - cat > "${HANDLERS_FILE_PATH}" << EOF -"""This is a generated stub of handlers to be connected to the paths defined in the API - -""" -import logging - -from aiohttp import web_exceptions - -log = logging.getLogger(__name__) - -# This module shall contain the handlers of the API (implementation side of the openapi server side). -# Each operation is typically defined as -# async def root_get(request): -# return "hello API world" - -# The API shall define a path where the entry operationId: -# operationId: root_get -EOF -fi - -# always generate routing -cat > "${ROUTING_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. - -use create_web_app to initialise the web application using the specification file. 
-The base folder is the root of the package. -""" - - -import logging -from pathlib import Path - -from aiohttp import hdrs, web -from aiohttp_apiset import SwaggerRouter -from aiohttp_apiset.exceptions import ValidationError -from aiohttp_apiset.middlewares import Jsonify, jsonify -from aiohttp_apiset.swagger.loader import ExtendedSchemaFile -from aiohttp_apiset.swagger.operations import OperationIdMapping - -from .. import handlers -from .models.base_model_ import Model - -log = logging.getLogger(__name__) - -@web.middleware -async def __handle_errors(request, handler): - try: - log.debug("error middleware handling request %s to handler %s", request, handler) - response = await handler(request) - return response - except ValidationError as ex: - # aiohttp apiset errors - log.exception("error happened in handling route") - error = dict(status=ex.status, message=ex.to_tree()) - error_enveloped = dict(error=error) - return web.json_response(error_enveloped, status=ex.status) - except web.HTTPError as ex: - log.exception("error happened in handling route") - error = dict(status=ex.status, message=str(ex.reason)) - error_enveloped = dict(data=error) - return web.json_response(error_enveloped, status=ex.status) - - -def create_web_app(base_folder, spec_file, additional_middlewares = None): - # create the default mapping of the operationId to the implementation code in handlers - opmap = __create_default_operation_mapping(Path(base_folder / spec_file)) - - # generate a version 3 of the API documentation - router = SwaggerRouter( - swagger_ui='/apidoc/', - version_ui=3, # forces the use of version 3 by default - search_dirs=[base_folder], - default_validate=True, - ) - - # add automatic jsonification of the models located in generated code - jsonify.singleton = Jsonify(indent=3, ensure_ascii=False) - jsonify.singleton.add_converter(Model, lambda o: o.to_dict(), score=0) - - middlewares = [jsonify, __handle_errors] - if additional_middlewares: - 
middlewares.extend(additional_middlewares) - # create the web application using the API - app = web.Application( - router=router, - middlewares=middlewares, - ) - router.set_cors(app, domains='*', headers=( - (hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, hdrs.AUTHORIZATION), - )) - - # Include our specifications in a router, - # is now available in the swagger-ui to the address http://localhost:8080/swagger/?spec=v1 - router.include( - spec=Path(base_folder / spec_file), - operationId_mapping=opmap, - name='v0', # name to access in swagger-ui, - basePath="/v0" # BUG: in apiset with openapi 3.0.0 [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) - ) - - return app - -def __create_default_operation_mapping(specs_file): - operation_mapping = {} - yaml_specs = ExtendedSchemaFile(specs_file) - paths = yaml_specs['paths'] - for path in paths.items(): - for method in path[1].items(): # can be get, post, patch, put, delete... - op_str = "operationId" - if op_str not in method[1]: - raise Exception("The API %s does not contain the operationId tag for route %s %s" % (specs_file, path[0], method[0])) - operation_id = method[1][op_str] - operation_mapping[operation_id] = getattr(handlers, operation_id) - return OperationIdMapping(**operation_mapping) -EOF diff --git a/services/director/temp_generate_openapi.sh b/services/director/temp_generate_openapi.sh deleted file mode 100755 index 533053087ef..00000000000 --- a/services/director/temp_generate_openapi.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -set -e - -cd $(dirname $0) -usage() -{ - echo "usage: temp_generate_openapi.sh [[[-i input]] | [-h help]]" -} - -apihub_specs_dir= -# process arguments -while [ "$1" != "" ]; do - case $1 in - -i | --input ) shift - apihub_specs_dir=$1 - ;; - -h | --help ) usage - exit - ;; - * ) usage - exit 1 - esac - shift -done - -if [ -z "$apihub_specs_dir" ]; then - echo "please define an apihub specs directory..." 
- usage - exit 1 -fi - -docker run \ - -v $apihub_specs_dir:/input \ - -v ${PWD}/src/simcore_service_director/api/v0:/output \ - itisfoundation/oas_resolver \ - /input/director/v0/openapi.yaml \ - /output/openapi.yaml From 5e7d7a5a417b046cc791d69767f1cf2f8ed37029 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:13:10 +0100 Subject: [PATCH 004/201] setup --- services/director/Makefile | 10 ----- services/director/setup.cfg | 6 --- services/director/setup.py | 88 +++++++++++++++++++------------------ 3 files changed, 45 insertions(+), 59 deletions(-) diff --git a/services/director/Makefile b/services/director/Makefile index 0e91426d6d2..140d05c72d0 100644 --- a/services/director/Makefile +++ b/services/director/Makefile @@ -3,13 +3,3 @@ # include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile - - -_check_python_version: - # Checking that runs with correct python version - @python3 -c "import sys; current_version=[int(d) for d in '3.6'.split('.')]; assert sys.version_info[:2]==tuple(current_version[:2]), f'Expected python $(EXPECTED_PYTHON_VERSION), got {sys.version_info}'" - - -.PHONY: openapi-specs -openapi-specs: ## updates and validates openapi specifications - $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 8e7e8ea592f..46680238b7d 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -5,10 +5,4 @@ message = director api version: {current_version} → {new_version} tag = False commit_args = --no-verify -[bumpversion:file:setup.py] -search = "{current_version}" -replace = "{new_version}" - [bumpversion:file:VERSION] -[bumpversion:file:../../api/specs/director/openapi.yaml] -[bumpversion:file:./src/simcore_service_director/api/v0/openapi.yaml] diff --git a/services/director/setup.py b/services/director/setup.py index 8c12d36f5cb..4d8fdfcc7e6 100644 --- 
a/services/director/setup.py +++ b/services/director/setup.py @@ -4,63 +4,65 @@ from setuptools import find_packages, setup -here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -if not (sys.version_info.major == 3 and sys.version_info.minor == 6): - raise RuntimeError( - "Requires <=3.6, got %s. Did you forget to activate virtualenv?" - % sys.version_info - ) +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } -def read_reqs(reqs_path: Path): - reqs = re.findall( - r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", reqs_path.read_text(), re.MULTILINE - ) - # TODO: temporary excluding requirements using git - # https://pip.pypa.io/en/stable/reference/pip_install/#vcs-support - return [r for r in reqs if not r.startswith("git")] +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -install_requirements = read_reqs(here / "requirements" / "_base.txt") + [ - "aiohttp-apiset", - "simcore-service-library", -] +NAME = "simcore-service-director" +VERSION = (CURRENT_DIR / "VERSION").read_text().strip() +AUTHORS = ("Sylvain Anderegg (sanderegg)",) +DESCRIPTION = "oSparc Director webserver service" +README = (CURRENT_DIR / "README.md").read_text() -test_requirements = read_reqs(here / "requirements" / "_test.txt") +PROD_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.txt") + | { + "simcore-models-library", + "simcore-service-library[fastapi]", + "simcore-settings-library", + } +) + +TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -_CONFIG = dict( - name="simcore-service-director", - version="0.1.0", - description="oSparc Director webserver service", - author="Sylvain Anderegg (sanderegg)", - python_requires="~=3.6", - packages=find_packages(where="src"), - package_dir={ + +SETUP = { + "name": NAME, + "version": VERSION, + 
"author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - install_requires=install_requirements, - tests_require=test_requirements, - setup_requires=["pytest-runner"], - package_data={ - "": ["api/v0/openapi.yaml", "api/v0/schemas/*.json"], - }, - entry_points={ + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-director = simcore_service_director.__main__:main", "simcore-service = simcore_service_director.__main__:main", ], }, -) - - -def main(): - """Execute the setup commands.""" - setup(**_CONFIG) - return 0 # syccessful termination - +} if __name__ == "__main__": - raise SystemExit(main()) + setup(**SETUP) From e13f2269a4241d4704438c16536d5437ffe52280 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:15:08 +0100 Subject: [PATCH 005/201] =?UTF-8?q?director=20api=20version:=200.1.0=20?= =?UTF-8?q?=E2=86=92=201.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/director/VERSION | 2 +- services/director/setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/VERSION b/services/director/VERSION index 6e8bf73aa55..3eefcb9dd5b 100644 --- a/services/director/VERSION +++ b/services/director/VERSION @@ -1 +1 @@ -0.1.0 +1.0.0 diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 46680238b7d..1eb089c0af8 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.1.0 +current_version = 1.0.0 commit = True message = director api version: 
{current_version} → {new_version} tag = False From add6a76c1b8cd44027935a9c4ca8d3667a540ad9 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:18:35 +0100 Subject: [PATCH 006/201] unreadme --- services/director/README.md | 85 ------------------------------------- 1 file changed, 85 deletions(-) diff --git a/services/director/README.md b/services/director/README.md index 21146025f16..d919b4f4e98 100644 --- a/services/director/README.md +++ b/services/director/README.md @@ -1,86 +1 @@ # director - -[![Docker Pulls](https://img.shields.io/docker/pulls/itisfoundation/director.svg)](https://hub.docker.com/r/itisfoundation/director/tags) -[![](https://images.microbadger.com/badges/image/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/version/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/commit/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") - - -## Usage - -```bash - # go to director folder - cd /services/director - # install - pip install . - # start director - simcore-service-director - # or - python -m simcore_service_director -``` - -## Development - -```bash - # go to director folder - cd /services/director - # install with symlinks - pip install -r requirements-dev.txt -``` - -The director implements a REST API defined in __/src/simcore_service_director/api/v1/openapi.yaml__. -First extend the API and validate the API before implementing any new route. - -## Current status - -End validation of the requests/responses is missing as some issues arose with using the openapi-core library. It seems it is not happy with referencing a json schema file. 
An issue was filed to see if something may be done quickly [github](https://github.com/p1c2u/openapi-core/issues/90). - -## docker - -- Uses multi-stage dockerfile to extend a common stack of layers into production or development images -- Main difference between development and production stages is whether the code gets copied or not inside of the image -- Development stage is set first to avoid re-building when files are changed -- ``boot.sh`` is necessary to activate the virtual environment inside of the docker - -```bash - - # development image - docker build --target development -t director:dev . - docker run -v %DIRECTOR_SRC_CODE:/home/scu/src director:dev - - # production image - docker build -t director:prod . - # or - docker build --target production -t director:prod . - docker run director:prod - -``` - -### local testing - -Using the main Makefile of the oSparc platform allows for testing the director: - -```bash - # go to root folder - make build-devel - # switch the docker swarm on in development mode - make up-devel -``` - -Then open [director-swagger-ui](http://localhost:8080/apidoc/) to see the director API and try out the different routes. - -## code generation from REST API "server side" - -Execute the following script for generating the necessary code server side - -```bash -./codegen.sh -``` - -NOTE: Issue #3 must still be taken care of manually! - -### Issues - -1. SwaggerRouter must be created with __version_ui__ set to 3 or the swagger ui must be access with ?version=3 -2. SwaggerRouter.include needs to have the argument __basePath__ filled to serve the API at the right location (ndlr /v1) [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) -3. 
The generated models need to be manually corrected when the properties are __nullable__ as the code generator does add a check for __None__ value that triggers a ValueError exception even though the value is allowed to be null [Python server models generation issue with __nullable: true__ on GitHub](https://github.com/OpenAPITools/openapi-generator/issues/579) From 3b10e76bff67003a5af841a245b9139c33296315 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:57:16 +0100 Subject: [PATCH 007/201] entrpoint/boot --- services/director/docker/boot.sh | 58 ++++++++---- services/director/docker/entrypoint.sh | 124 ++++++++++++------------- 2 files changed, 100 insertions(+), 82 deletions(-) diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index 2a77aa40daa..0d7122a6681 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -6,32 +6,56 @@ IFS=$(printf '\n\t') INFO="INFO: [$(basename "$0")] " -# BOOTING application --------------------------------------------- echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." 
-echo " User :$(id "$(whoami)")" -echo " Workdir :$(pwd)" +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +# +# DEVELOPMENT MODE +# +# - prints environ info +# - installs requirements in mounted volume +# if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "Environment :" printenv | sed 's/=/: /' | sed 's/^/ /' | sort echo "$INFO" "Python :" python --version | sed 's/^/ /' command -v python | sed 's/^/ /' - cd services/director || exit 1 - # speedup for legacy service with all essential dependencies pinned - # in this case `--no-deps` does the trick, for details see link - # https://stackoverflow.com/a/65793484/2855718 - pip install --no-cache-dir --no-deps -r requirements/dev.txt - cd - || exit 1 - echo "$INFO" "PIP :" - pip list | sed 's/^/ /' + + cd services/autoscaling + uv pip --quiet --no-cache-dir sync requirements/dev.txt + cd - + uv pip list +fi + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + # NOTE: production does NOT pre-installs debugpy + uv pip install --no-cache-dir debugpy fi -# RUNNING application ---------------------------------------- -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then - watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories -- \ - python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \ - simcore_service_director --loglevel="${LOGLEVEL}" +# +# RUNNING application +# + +APP_LOG_LEVEL=${DIRECTOR_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} +SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') +echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + + exec sh -c " + cd services/autoscaling/src/simcore_service_director && \ + python -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + --host 0.0.0.0 \ + --reload \ + $reload_dir_packages + --reload-dir . \ + --log-level \"${SERVER_LOG_LEVEL}\" + " else - exec simcore-service-director --loglevel="${LOGLEVEL}" + exec uvicorn simcore_service_director.main:the_app \ + --host 0.0.0.0 \ + --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh index 232da22ba7e..ad982fd8d5c 100755 --- a/services/director/docker/entrypoint.sh +++ b/services/director/docker/entrypoint.sh @@ -1,4 +1,9 @@ #!/bin/sh +# +# - Executes *inside* of the container upon start as --user [default root] +# - Notice that the container *starts* as --user [default root] but +# *runs* as non-root user [scu] +# set -o errexit set -o nounset @@ -10,86 +15,75 @@ ERROR="ERROR: [$(basename "$0")] " # Read self-signed SSH certificates (if applicable) # -# In case the director must access a docker registry in a secure way using +# In case clusters-keeper must access a docker registry in a secure way using # non-standard certificates (e.g. such as self-signed certificates), this call is needed. -# It needs to be executed as root. +# It needs to be executed as root. Also required to any access for example to secure rabbitmq. update-ca-certificates -# This entrypoint script: -# -# - Executes *inside* of the container upon start as --user [default root] -# - Notice that the container *starts* as --user [default root] but -# *runs* as non-root user [scu] -# echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." -echo "$INFO" "User :$(id "$(whoami)")" -echo "$INFO" "Workdir :$(pwd)" -echo scuUser :"$(id scu)" - -if [ "${SC_BUILD_TARGET}" = "development" ] -then - # NOTE: expects docker run ... 
-v $(pwd):/devel/services/director - DEVEL_MOUNT=/devel/services/director +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +echo "$INFO" "User : $(id scu)" +echo "$INFO" "python : $(command -v python)" +echo "$INFO" "pip : $(command -v pip)" - stat $DEVEL_MOUNT > /dev/null 2>&1 || \ - (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script +# +# DEVELOPMENT MODE +# - expects docker run ... -v $(pwd):$SC_DEVEL_MOUNT +# - mounts source folders +# - deduces host's uid/gip and assigns to user within docker +# +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "development mode detected..." + stat "${SC_DEVEL_MOUNT}" >/dev/null 2>&1 || + (echo "$ERROR" "You must mount '$SC_DEVEL_MOUNT' to deduce user and group ids" && exit 1) - echo "setting correct user id/group id..." - HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}") - HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}") - CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) - if [ "$HOST_USERID" -eq 0 ] - then - echo "Warning: Folder mounted owned by root user... adding $SC_USER_NAME to root..." - adduser "$SC_USER_NAME" root + echo "$INFO" "setting correct user id/group id..." + HOST_USERID=$(stat --format=%u "${SC_DEVEL_MOUNT}") + HOST_GROUPID=$(stat --format=%g "${SC_DEVEL_MOUNT}") + CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) + if [ "$HOST_USERID" -eq 0 ]; then + echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..." + adduser "$SC_USER_NAME" root + else + echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." 
+ # take host's credentials in $SC_USER_NAME + if [ -z "$CONT_GROUPNAME" ]; then + echo "$WARNING" "Creating new group grp$SC_USER_NAME" + CONT_GROUPNAME=grp$SC_USER_NAME + addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" else - echo "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." - # take host's credentials in $SC_USER_NAME - if [ -z "$CONT_GROUPNAME" ] - then - echo "Creating new group my$SC_USER_NAME" - CONT_GROUPNAME=my$SC_USER_NAME - addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" - else - echo "group already exists" - fi - echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." - adduser "$SC_USER_NAME" "$CONT_GROUPNAME" - - echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" - usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" - - echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around - echo "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + echo "$INFO" "group already exists" fi -fi + echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..." 
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + echo "$WARNING" "Changing ownership [this could take some time]" + echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ] -then - # NOTE: production does NOT pre-installs ptvsd - python3 -m pip install ptvsd + echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fi fi # Appends docker group if socket is mounted DOCKER_MOUNT=/var/run/docker.sock -if stat $DOCKER_MOUNT > /dev/null 2>&1 -then - echo "$INFO detected docker socket is mounted, adding user to group..." - GROUPID=$(stat --format=%g $DOCKER_MOUNT) - GROUPNAME=scdocker +if stat $DOCKER_MOUNT >/dev/null 2>&1; then + echo "$INFO detected docker socket is mounted, adding user to group..." + GROUPID=$(stat --format=%g $DOCKER_MOUNT) + GROUPNAME=scdocker - if ! addgroup --gid "$GROUPID" $GROUPNAME > /dev/null 2>&1 - then - echo "$WARNING docker group with $GROUPID already exists, getting group name..." - # if group already exists in container, then reuse name - GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) - echo "$WARNING docker group with $GROUPID has name $GROUPNAME" - fi - adduser "$SC_USER_NAME" "$GROUPNAME" + if ! addgroup --gid "$GROUPID" $GROUPNAME >/dev/null 2>&1; then + echo "$WARNING docker group with $GROUPID already exists, getting group name..." 
+ # if group already exists in container, then reuse name + GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) + echo "$WARNING docker group with $GROUPID has name $GROUPNAME" + fi + adduser "$SC_USER_NAME" "$GROUPNAME" fi echo "$INFO Starting $* ..." From 740406a5bb88f52516d99a682f588f3ee6b84183 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 13:37:51 +0100 Subject: [PATCH 008/201] healthcheck --- services/director/docker/healthcheck.py | 33 +++++++++++++------------ 1 file changed, 17 insertions(+), 16 deletions(-) mode change 100644 => 100755 services/director/docker/healthcheck.py diff --git a/services/director/docker/healthcheck.py b/services/director/docker/healthcheck.py old mode 100644 new mode 100755 index b3a1e7e8cad..10e58d00e21 --- a/services/director/docker/healthcheck.py +++ b/services/director/docker/healthcheck.py @@ -8,7 +8,7 @@ --timeout=30s \ --start-period=1s \ --retries=3 \ - CMD python3 docker/healthcheck.py http://localhost:8080/v0/ + CMD python3 docker/healthcheck.py http://localhost:8000/ ``` Q&A: @@ -18,23 +18,24 @@ import os import sys +from contextlib import suppress from urllib.request import urlopen -SUCCESS, UNHEALTHY = 0, 1 +# Disabled if boots with debugger (e.g. 
debug, pdb-debug, debug-ptvsd, etc) +SC_BOOT_MODE = os.environ.get("SC_BOOT_MODE", "") -# Disabled if boots with debugger -ok = os.environ.get("SC_BOOT_MODE") == "debug" +# Adds a base-path if defined in environ +SIMCORE_NODE_BASEPATH = os.environ.get("SIMCORE_NODE_BASEPATH", "") -# Queries host -# pylint: disable=consider-using-with -ok = ( - ok - or urlopen( - "{host}{baseurl}".format( - host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") - ) # adds a base-path if defined in environ - ).getcode() - == 200 -) -sys.exit(SUCCESS if ok else UNHEALTHY) +def is_service_healthy() -> bool: + if "debug" in SC_BOOT_MODE.lower(): + return True + + with suppress(Exception): + with urlopen(f"{sys.argv[1]}{SIMCORE_NODE_BASEPATH}") as f: + return f.getcode() == 200 + return False + + +sys.exit(os.EX_OK if is_service_healthy() else os.EX_UNAVAILABLE) From a0191e4debb7f2a5014dd558fab313d030319515 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:08:59 +0100 Subject: [PATCH 009/201] base routes --- .../simcore_service_director/api/__init__.py | 0 .../api/rest/__init__.py | 0 .../api/rest/_health.py | 11 +++++ .../api/rest/_running_interactive_services.py | 41 +++++++++++++++++++ .../api/rest/_service_extras.py | 14 +++++++ .../api/rest/_services.py | 30 ++++++++++++++ .../api/rest/routes.py | 26 ++++++++++++ 7 files changed, 122 insertions(+) create mode 100644 services/director/src/simcore_service_director/api/__init__.py create mode 100644 services/director/src/simcore_service_director/api/rest/__init__.py create mode 100644 services/director/src/simcore_service_director/api/rest/_health.py create mode 100644 services/director/src/simcore_service_director/api/rest/_running_interactive_services.py create mode 100644 services/director/src/simcore_service_director/api/rest/_service_extras.py create mode 100644 services/director/src/simcore_service_director/api/rest/_services.py create mode 100644 
services/director/src/simcore_service_director/api/rest/routes.py diff --git a/services/director/src/simcore_service_director/api/__init__.py b/services/director/src/simcore_service_director/api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/api/rest/__init__.py b/services/director/src/simcore_service_director/api/rest/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/api/rest/_health.py b/services/director/src/simcore_service_director/api/rest/_health.py new file mode 100644 index 00000000000..78659b036ae --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_health.py @@ -0,0 +1,11 @@ +import arrow +from fastapi import APIRouter +from fastapi.responses import PlainTextResponse + +router = APIRouter() + + +@router.get("/", include_in_schema=True, response_class=PlainTextResponse) +async def health_check() -> str: + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py new file mode 100644 index 00000000000..1bd53330d30 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -0,0 +1,41 @@ +from pathlib import Path +from uuid import UUID + +import arrow +from fastapi import APIRouter +from models_library.projects import ProjectID +from models_library.services_types import ServiceKey, ServiceVersion +from models_library.users import UserID + +router = APIRouter() + + +@router.get("/running_interactive_services") +async def list_running_services(user_id: UserID, project_id: ProjectID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
+ return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.post("/running_interactive_services") +async def start_service( + user_id: UserID, + project_id: ProjectID, + service_key: ServiceKey, + service_uuid: UUID, + service_basepath: Path, + service_tag: ServiceVersion | None = None, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/running_interactive_services/{service_uuid}") +async def get_running_service(service_uuid: UUID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.delete("/running_interactive_services/{service_uuid}") +async def stop_service(service_uuid: UUID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py new file mode 100644 index 00000000000..1c30f411e22 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -0,0 +1,14 @@ +import arrow +from fastapi import APIRouter +from models_library.services_types import ServiceKey, ServiceVersion + +router = APIRouter() + + +@router.get("/service_extras/{service_key}/{service_version}") +async def list_service_extras( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
+ return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py new file mode 100644 index 00000000000..2bf1b066bf3 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -0,0 +1,30 @@ +import arrow +from fastapi import APIRouter +from models_library.services_enums import ServiceType +from models_library.services_types import ServiceKey, ServiceVersion + +router = APIRouter() + + +@router.get("/services") +async def list_services(service_type: ServiceType | None = None): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/services/{service_key}/{service_version}") +async def get_service( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/services/{service_key}/{service_version}/labels") +async def list_service_labels( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py new file mode 100644 index 00000000000..82ccec19302 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -0,0 +1,26 @@ +from fastapi import APIRouter, FastAPI, HTTPException +from servicelib.fastapi.exceptions_utils import ( + handle_errors_as_500, + http_exception_as_json_response, +) + +from .._meta import API_VTAG +from . 
import _health, _running_interactive_services, _service_extras, _services + + +def setup_api_routes(app: FastAPI): + """ + Composes resources/sub-resources routers + """ + + app.include_router(_health.router, tags=["operations"]) + + # include the rest under /vX + api_router = APIRouter(prefix=f"/{API_VTAG}") + api_router.include_router(_services.router, tags=["services"]) + api_router.include_router(_service_extras.router, tags=["services"]) + api_router.include_router(_running_interactive_services.router, tags=["services"]) + app.include_router(api_router) + + app.add_exception_handler(Exception, handle_errors_as_500) + app.add_exception_handler(HTTPException, http_exception_as_json_response) From 768aa23e4d2938aaba07b2178709569b0173c6f7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:09:15 +0100 Subject: [PATCH 010/201] added meta --- .../src/simcore_service_director/_meta.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 services/director/src/simcore_service_director/_meta.py diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py new file mode 100644 index 00000000000..bbcd44e8945 --- /dev/null +++ b/services/director/src/simcore_service_director/_meta.py @@ -0,0 +1,43 @@ +""" Application's metadata + +""" + +from typing import Final + +from models_library.basic_types import VersionStr +from packaging.version import Version +from servicelib.utils_meta import PackageInfo + +info: Final = PackageInfo(package_name="simcore-service-director") +__version__: Final[VersionStr] = info.__version__ + + +PROJECT_NAME: Final[str] = info.project_name +VERSION: Final[Version] = info.version +API_VERSION: Final[VersionStr] = info.__version__ +APP_NAME: Final[str] = PROJECT_NAME +API_VTAG: Final[str] = info.api_prefix_path_tag +SUMMARY: Final[str] = info.get_summary() + + +# NOTE: 
https://patorjk.com/software/taag/#p=display&f=Electronic&t=Director-v0 +APP_STARTED_BANNER_MSG = r""" + + ▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄ ▄ ▄▄▄▄▄▄▄▄▄ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░▌▐░░░░░░░░░▌ +▐░█▀▀▀▀▀▀▀█░▌▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀█░▌ ▐░▌ ▐░▌▐░█░█▀▀▀▀▀█░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▄▄▄▄▄▄▄█░▌ ▄▄▄▄▄▄▄▄▄▄▄▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▀▀▀▀█░█▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▀▀▀▀█░█▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ +▐░█▄▄▄▄▄▄▄█░▌▄▄▄▄█░█▄▄▄▄ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░▌ ▐░▌ ▐░▐░▌ ▐░█▄▄▄▄▄█░█░▌ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░▌ + ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀ + {} +""".format( + f"v{__version__}" +) + + +APP_FINISHED_BANNER_MSG = info.get_finished_banner() From 67673461bb0ce507a2eb67cc1a6e90dda9bd66c9 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:04 +0100 Subject: [PATCH 011/201] added core --- .../simcore_service_director/core/__init__.py | 0 .../core/application.py | 64 ++++++++++++++++++ .../simcore_service_director/core/errors.py | 14 ++++ .../simcore_service_director/core/settings.py | 66 +++++++++++++++++++ 4 files changed, 144 insertions(+) create mode 100644 services/director/src/simcore_service_director/core/__init__.py create mode 100644 services/director/src/simcore_service_director/core/application.py create mode 100644 
services/director/src/simcore_service_director/core/errors.py create mode 100644 services/director/src/simcore_service_director/core/settings.py diff --git a/services/director/src/simcore_service_director/core/__init__.py b/services/director/src/simcore_service_director/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py new file mode 100644 index 00000000000..f75f0c35336 --- /dev/null +++ b/services/director/src/simcore_service_director/core/application.py @@ -0,0 +1,64 @@ +import logging +from typing import Final + +from fastapi import FastAPI +from servicelib.fastapi.tracing import setup_tracing + +from .._meta import ( + API_VERSION, + API_VTAG, + APP_FINISHED_BANNER_MSG, + APP_NAME, + APP_STARTED_BANNER_MSG, +) +from ..api.rest.routes import setup_api_routes +from .settings import ApplicationSettings + +_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR +_NOISY_LOGGERS: Final[tuple[str]] = ("werkzeug",) + +logger = logging.getLogger(__name__) + + +def create_app(settings: ApplicationSettings) -> FastAPI: + # keep mostly quiet noisy loggers + quiet_level: int = max( + min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + ) + for name in _NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) + + logger.info("app settings: %s", settings.json(indent=1)) + + app = FastAPI( + debug=settings.DIRECTOR_DEBUG, + title=APP_NAME, + description="Director-v0 service", + version=API_VERSION, + openapi_url=f"/api/{API_VTAG}/openapi.json", + docs_url="/dev/doc", + redoc_url=None, # default disabled + ) + # STATE + app.state.settings = settings + assert app.state.settings.API_VERSION == API_VERSION # nosec + + # PLUGINS SETUP + setup_api_routes(app) + + if app.state.settings.DIRECTOR_TRACING: + setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + + # ERROR HANDLERS + 
+ # EVENTS + async def _on_startup() -> None: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + + async def _on_shutdown() -> None: + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app diff --git a/services/director/src/simcore_service_director/core/errors.py b/services/director/src/simcore_service_director/core/errors.py new file mode 100644 index 00000000000..2664f266da7 --- /dev/null +++ b/services/director/src/simcore_service_director/core/errors.py @@ -0,0 +1,14 @@ +from typing import Any + +from models_library.errors_classes import OsparcErrorMixin + + +class DirectorRuntimeError(OsparcErrorMixin, RuntimeError): + def __init__(self, **ctx: Any) -> None: + super().__init__(**ctx) + + msg_template: str = "Director-v0 unexpected error" + + +class ConfigurationError(DirectorRuntimeError): + msg_template: str = "Application misconfiguration: {msg}" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py new file mode 100644 index 00000000000..e1bd33985be --- /dev/null +++ b/services/director/src/simcore_service_director/core/settings.py @@ -0,0 +1,66 @@ +from models_library.basic_types import ( + BootModeEnum, + BuildTargetEnum, + LogLevel, + PortInt, + VersionTag, +) +from pydantic import Field, PositiveInt +from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from settings_library.base import BaseCustomSettings +from settings_library.tracing import TracingSettings +from settings_library.utils_logging import MixinLoggingSettings + +from .._meta import API_VERSION, API_VTAG, APP_NAME + + +class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): + # CODE STATICS --------------------------------------------------------- + API_VERSION: str = API_VERSION + APP_NAME: str = APP_NAME + API_VTAG: VersionTag = API_VTAG + + # 
IMAGE BUILDTIME ------------------------------------------------------ + # @Makefile + SC_BUILD_DATE: str | None = None + SC_BUILD_TARGET: BuildTargetEnum | None = None + SC_VCS_REF: str | None = None + SC_VCS_URL: str | None = None + + # @Dockerfile + SC_BOOT_MODE: BootModeEnum | None = None + SC_BOOT_TARGET: BuildTargetEnum | None = None + SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( + None, + description="If a single run of the check takes longer than timeout seconds " + "then the check is considered to have failed." + "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", + ) + SC_USER_ID: int | None = None + SC_USER_NAME: str | None = None + + # RUNTIME ----------------------------------------------------------- + DIRECTOR_DEBUG: bool = Field( + default=False, description="Debug mode", env=["DIRECTOR_DEBUG", "DEBUG"] + ) + DIRECTOR_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) + + DIRECTOR_LOGLEVEL: LogLevel = Field( + LogLevel.INFO, env=["DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + ) + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( + default=False, + env=[ + "DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ], + description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", + ) + DIRECTOR_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( + default_factory=dict, + env=["DIRECTOR_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"], + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ) + DIRECTOR_TRACING: TracingSettings | None = Field( + auto_default_from_env=True, description="settings for opentelemetry tracing" + ) From 4d5d4aaeb0e67beb73078ca2077b2bef33efb79a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:19 +0100 Subject: [PATCH 012/201] modified meta --- services/director/src/simcore_service_director/_meta.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py index bbcd44e8945..5bf4218d678 100644 --- a/services/director/src/simcore_service_director/_meta.py +++ b/services/director/src/simcore_service_director/_meta.py @@ -4,7 +4,7 @@ from typing import Final -from models_library.basic_types import VersionStr +from models_library.basic_types import VersionStr, VersionTag from packaging.version import Version from servicelib.utils_meta import PackageInfo @@ -16,7 +16,7 @@ VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ APP_NAME: Final[str] = PROJECT_NAME -API_VTAG: Final[str] = info.api_prefix_path_tag +API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() From 76187a3a0f90a60cae13fa03194d185d28f3b6d5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:37 +0100 Subject: [PATCH 013/201] added cli --- .../director/src/simcore_service_director/cli.py | 16 ++++++++++++++++ 1 file 
changed, 16 insertions(+) create mode 100644 services/director/src/simcore_service_director/cli.py diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py new file mode 100644 index 00000000000..4b6beb2a800 --- /dev/null +++ b/services/director/src/simcore_service_director/cli.py @@ -0,0 +1,16 @@ +import logging + +import typer +from settings_library.utils_cli import create_settings_command, create_version_callback + +from ._meta import PROJECT_NAME, __version__ +from .core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +main = typer.Typer(name=PROJECT_NAME) + +main.command()( + create_settings_command(settings_cls=ApplicationSettings, logger=_logger) +) +main.callback()(create_version_callback(__version__)) From 30ef4caf9af2c912d89155b8a0c5f23594ad49bc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:24:26 +0100 Subject: [PATCH 014/201] replaced main --- .../src/simcore_service_director/main.py | 53 ++++++------------- .../src/simcore_service_director/main_old.py | 41 ++++++++++++++ 2 files changed, 58 insertions(+), 36 deletions(-) create mode 100644 services/director/src/simcore_service_director/main_old.py diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 0bf6edccc57..173c3e6c9c7 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,42 +1,23 @@ -#!/usr/bin/env python3 -import logging - -from aiohttp import web - -# NOTE: notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 -# pylint: disable=no-name-in-module -from servicelib.client_session import persistent_client_session -from simcore_service_director import registry_cache_task, resources -from simcore_service_director.monitoring import setup_app_monitoring -from 
simcore_service_director.rest import routing - -from .registry_proxy import setup_registry - -log = logging.getLogger(__name__) - +"""Main application to be deployed by uvicorn (or equivalent) server -def setup_app() -> web.Application: - api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) - app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) +""" - # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed - app.cleanup_ctx.append(persistent_client_session) - app.cleanup_ctx.append(setup_registry) - - registry_cache_task.setup(app) - - setup_app_monitoring(app, "simcore_service_director") - - # NOTE: removed tracing from director. Users old version of servicelib and - # in any case this service will be completely replaced - - return app +import logging +from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings -def main() -> None: - app = setup_app() - web.run_app(app, port=8080) +_the_settings = ApplicationSettings.create_from_envs() +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.log_level) # NOSONAR +logging.root.setLevel(_the_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, +) -if __name__ == "__main__": - main() +# SINGLETON FastAPI app +the_app: FastAPI = create_app(_the_settings) diff --git a/services/director/src/simcore_service_director/main_old.py b/services/director/src/simcore_service_director/main_old.py new file mode 100644 index 00000000000..39a6c0dfc38 --- /dev/null +++ b/services/director/src/simcore_service_director/main_old.py @@ -0,0 +1,41 @@ +import logging + +from aiohttp import web + +# NOTE: 
notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 +# pylint: disable=no-name-in-module +from servicelib.client_session import persistent_client_session +from simcore_service_director import registry_cache_task, resources +from simcore_service_director.monitoring import setup_app_monitoring +from simcore_service_director.rest import routing + +from .registry_proxy import setup_registry + +log = logging.getLogger(__name__) + + +def setup_app() -> web.Application: + api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) + app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) + + # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed + app.cleanup_ctx.append(persistent_client_session) + app.cleanup_ctx.append(setup_registry) + + registry_cache_task.setup(app) + + setup_app_monitoring(app, "simcore_service_director") + + # NOTE: removed tracing from director. Users old version of servicelib and + # in any case this service will be completely replaced + + return app + + +def main() -> None: + app = setup_app() + web.run_app(app, port=8080) + + +if __name__ == "__main__": + main() From 86e13cc5eb7270a48bbacf92d1db8cd15c1b2ed8 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:31:51 +0100 Subject: [PATCH 015/201] conftest --- services/director/tests/conftest.py | 117 ++++++++++++++++------------ 1 file changed, 66 insertions(+), 51 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index eecb693e0de..f59bb2b5fe3 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -8,75 +8,77 @@ import pytest import simcore_service_director -from aiohttp import ClientSession +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from 
simcore_service_director import config, resources -# NOTE: that all the changes in these pytest-plugins MUST by py3.6 compatible! pytest_plugins = [ - "fixtures.fake_services", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", + "pytest_simcore.fakers_projects_data", + "pytest_simcore.fakers_users_data", "pytest_simcore.repository_paths", - "pytest_simcore.pytest_global_environs", ] -@pytest.fixture -def configure_swarm_stack_name(): - config.SWARM_STACK_NAME = "test_stack" +def pytest_addoption(parser): + parser.addoption("--registry_url", action="store", default="default url") + parser.addoption("--registry_user", action="store", default="default user") + parser.addoption("--registry_pw", action="store", default="default pw") @pytest.fixture(scope="session") -def common_schemas_specs_dir(osparc_simcore_root_dir): - specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" - assert specs_dir.exists() - return specs_dir +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "director" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_director")) + return service_folder @pytest.fixture(scope="session") -def package_dir(): +def installed_package_dir() -> Path: dirpath = Path(simcore_service_director.__file__).resolve().parent assert dirpath.exists() return dirpath +@pytest.fixture(scope="session") +def common_schemas_specs_dir(osparc_simcore_root_dir: Path): + specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" + assert specs_dir.exists() + return specs_dir + + @pytest.fixture -def configure_schemas_location(package_dir, common_schemas_specs_dir): +def configure_schemas_location( + installed_package_dir: Path, common_schemas_specs_dir: Path +): config.NODE_SCHEMA_LOCATION = str( 
common_schemas_specs_dir / "node-meta-v0.0.1.json" ) resources.RESOURCE_NODE_SCHEMA = os.path.relpath( - config.NODE_SCHEMA_LOCATION, package_dir + config.NODE_SCHEMA_LOCATION, installed_package_dir ) +@pytest.fixture(scope="session") +def configure_swarm_stack_name(): + config.SWARM_STACK_NAME = "test_stack" + + @pytest.fixture -def configure_registry_access(docker_registry): +def configure_registry_access(docker_registry: str): config.REGISTRY_URL = docker_registry config.REGISTRY_PATH = docker_registry config.REGISTRY_SSL = False config.DIRECTOR_REGISTRY_CACHING = False -@pytest.fixture -def user_id(): - yield "some_user_id" - - -@pytest.fixture -def project_id(): - yield "some_project_id" - - -def pytest_addoption(parser): - parser.addoption("--registry_url", action="store", default="default url") - parser.addoption("--registry_user", action="store", default="default user") - parser.addoption("--registry_pw", action="store", default="default pw") - - @pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig): +def configure_custom_registry(pytestconfig: pytest.Config): # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password config.REGISTRY_URL = pytestconfig.getoption("registry_url") @@ -87,29 +89,42 @@ def configure_custom_registry(pytestconfig): @pytest.fixture -async def aiohttp_mock_app(loop, mocker): - print("client session started ...") - session = ClientSession() +def api_version_prefix() -> str: + assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) + return "v0" - mock_app_storage = { - config.APP_CLIENT_SESSION_KEY: session, - config.APP_REGISTRY_CACHE_DATA_KEY: {}, - } - def _get_item(self, key): - return mock_app_storage[key] +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_environment_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **docker_compose_service_environment_dict, + 
# ADD here env-var overrides + }, + ) - aiohttp_app = mocker.patch("aiohttp.web.Application") - aiohttp_app.__getitem__ = _get_item - yield aiohttp_app +# @pytest.fixture +# async def aiohttp_mock_app(loop, mocker): +# print("client session started ...") +# session = ClientSession() - # cleanup session - await session.close() - print("client session closed") +# mock_app_storage = { +# config.APP_CLIENT_SESSION_KEY: session, +# config.APP_REGISTRY_CACHE_DATA_KEY: {}, +# } +# def _get_item(self, key): +# return mock_app_storage[key] -@pytest.fixture -def api_version_prefix() -> str: - assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - return "v0" +# aiohttp_app = mocker.patch("aiohttp.web.Application") +# aiohttp_app.__getitem__ = _get_item + +# yield aiohttp_app + +# # cleanup session +# await session.close() From 6a4bf59abd2175f56b12a41e79e46e03db4a0485 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:33:43 +0100 Subject: [PATCH 016/201] ruffed --- .../src/simcore_service_director/config.py | 13 ++++---- .../director/tests/fixtures/fake_services.py | 6 ++-- services/director/tests/test_docker_utils.py | 6 ++-- .../director/tests/test_dummy_services.py | 2 +- services/director/tests/test_handlers.py | 31 +++++++------------ services/director/tests/test_json_schemas.py | 1 - services/director/tests/test_oas.py | 1 - .../director/tests/test_openapi_schemas.py | 20 +++++------- services/director/tests/test_producer.py | 4 +-- .../tests/test_registry_cache_task.py | 2 +- .../director/tests/test_registry_proxy.py | 18 +++++------ services/director/tests/test_utils.py | 4 +-- 12 files changed, 45 insertions(+), 63 deletions(-) diff --git a/services/director/src/simcore_service_director/config.py b/services/director/src/simcore_service_director/config.py index 67a15cb05ac..090a506adb0 100644 --- a/services/director/src/simcore_service_director/config.py +++ 
b/services/director/src/simcore_service_director/config.py @@ -6,9 +6,8 @@ import os import warnings from distutils.util import strtobool -from typing import Dict, Optional -from servicelib.client_session import ( # pylint: disable=no-name-in-module +from servicelib.aiohttp.application_keys import ( # pylint: disable=no-name-in-module APP_CLIENT_SESSION_KEY, ) @@ -63,11 +62,11 @@ def _from_env_with_default(env: str, python_type, default): ) -def _parse_placement_substitutions() -> Dict[str, str]: +def _parse_placement_substitutions() -> dict[str, str]: str_env_var: str = os.environ.get( "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", "{}" ) - result: Dict[str, str] = json.loads(str_env_var) + result: dict[str, str] = json.loads(str_env_var) if len(result) > 0: warnings.warn( # noqa: B028 @@ -83,7 +82,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: return result -DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: Dict[ +DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ str, str ] = _parse_placement_substitutions() @@ -126,7 +125,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: EXTRA_HOSTS_SUFFIX: str = os.environ.get("EXTRA_HOSTS_SUFFIX", "undefined") # these are the envs passed to the dynamic services by default -SERVICES_DEFAULT_ENVS: Dict[str, str] = { +SERVICES_DEFAULT_ENVS: dict[str, str] = { "POSTGRES_ENDPOINT": os.environ.get( "POSTGRES_ENDPOINT", "undefined postgres endpoint" ), @@ -151,7 +150,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: "NODE_SCHEMA_LOCATION", f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json" ) # used to find the right network name -SIMCORE_SERVICES_NETWORK_NAME: Optional[str] = os.environ.get( +SIMCORE_SERVICES_NETWORK_NAME: str | None = os.environ.get( "SIMCORE_SERVICES_NETWORK_NAME" ) # useful when developing with an alternative registry namespace diff --git a/services/director/tests/fixtures/fake_services.py 
b/services/director/tests/fixtures/fake_services.py index e58f547f729..b709e422271 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -19,7 +19,7 @@ _logger = logging.getLogger(__name__) -@pytest.fixture(scope="function") +@pytest.fixture() def push_services(docker_registry, tmpdir): registry_url = docker_registry tmp_dir = Path(tmpdir) @@ -50,7 +50,7 @@ async def build_push_images( images_to_build = [] - for image_index in range(0, number_of_computational_services): + for image_index in range(number_of_computational_services): images_to_build.append( _build_push_image( tmp_dir, @@ -63,7 +63,7 @@ async def build_push_images( ) ) - for image_index in range(0, number_of_interactive_services): + for image_index in range(number_of_interactive_services): images_to_build.append( _build_push_image( tmp_dir, diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/test_docker_utils.py index f6cce146e4b..76b4abd2051 100644 --- a/services/director/tests/test_docker_utils.py +++ b/services/director/tests/test_docker_utils.py @@ -50,11 +50,11 @@ async def test_swarm_get_number_nodes(docker_swarm): async def test_swarm_has_manager_nodes(docker_swarm): - assert (await docker_utils.swarm_has_manager_nodes()) == True + assert (await docker_utils.swarm_has_manager_nodes()) is True async def test_swarm_has_worker_nodes(docker_swarm): - assert (await docker_utils.swarm_has_worker_nodes()) == False + assert (await docker_utils.swarm_has_worker_nodes()) is False async def test_push_services( @@ -62,6 +62,6 @@ async def test_push_services( configure_registry_access, configure_schemas_location, ): - images = await push_services( + await push_services( number_of_computational_services=3, number_of_interactive_services=3 ) diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py index f38cb848b22..5427fd178c7 100644 --- 
a/services/director/tests/test_dummy_services.py +++ b/services/director/tests/test_dummy_services.py @@ -3,9 +3,9 @@ # pylint: disable=bare-except # pylint:disable=redefined-outer-name -import pytest import json import logging + from helpers import json_schema_validator log = logging.getLogger(__name__) diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 4d981ede990..3a326ef6ba2 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -7,7 +7,6 @@ import json import uuid -from typing import Optional from urllib.parse import quote import pytest @@ -29,8 +28,7 @@ def client( ): app = main.setup_app() server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - client = loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - return client + return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) async def test_root_get(client, api_version_prefix): @@ -58,7 +56,7 @@ def _check_services(created_services, services, schema_version="v1"): ] json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() == True + assert json_schema_path.exists() is True with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) @@ -100,7 +98,7 @@ async def test_services_get(docker_registry, client, push_services, api_version_ assert web_response.status == 400 assert web_response.content_type == "application/json" services_enveloped = await web_response.json() - assert not "data" in services_enveloped + assert "data" not in services_enveloped assert "error" in services_enveloped web_response = await client.get( @@ -147,9 +145,9 @@ async def test_services_by_key_version_get( for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/services/{key}/{version}" web_response = await client.get(url) @@ -174,9 +172,9 @@ async def test_get_service_labels( for service in created_services: service_description = service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/services/{key}/{version}/labels" web_response = await client.get(url) assert web_response.status == 200, await web_response.text() @@ -209,9 +207,9 @@ async def test_services_extras_by_key_version_get( for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/service_extras/{key}/{version}" web_response = await client.get(url) @@ -232,7 +230,7 @@ async def _start_get_stop_services( user_id, project_id, api_version_prefix: str, - save_state: Optional[bool], + save_state: bool | None, expected_save_state_call: bool, mocker, ): @@ -422,7 +420,7 @@ async def test_running_services_post_and_delete( user_id, project_id, api_version_prefix, - save_state: Optional[bool], + save_state: bool | None, expected_save_state_call: bool, mocker, ): @@ -536,10 +534,5 @@ async def test_performance_get_services( print("iteration completed in", (time.perf_counter() - start_time_i), "s") stop_time = time.perf_counter() print( - "Time to run {} times: {}s, #services {}, time per call {}s/service".format( - number_of_calls, - stop_time - start_time, - number_of_services, - (stop_time - start_time) / number_of_calls / number_of_services, - ) + f"Time to run {number_of_calls} 
times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" ) diff --git a/services/director/tests/test_json_schemas.py b/services/director/tests/test_json_schemas.py index 6a45b1d0740..37d68c62f09 100644 --- a/services/director/tests/test_json_schemas.py +++ b/services/director/tests/test_json_schemas.py @@ -3,7 +3,6 @@ import pytest from jsonschema import SchemaError, ValidationError, validate - from simcore_service_director import resources API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) diff --git a/services/director/tests/test_oas.py b/services/director/tests/test_oas.py index 86898604fa4..c62080a2391 100644 --- a/services/director/tests/test_oas.py +++ b/services/director/tests/test_oas.py @@ -4,7 +4,6 @@ import yaml from openapi_spec_validator import validate_spec from openapi_spec_validator.exceptions import OpenAPIValidationError - from simcore_service_director import resources diff --git a/services/director/tests/test_openapi_schemas.py b/services/director/tests/test_openapi_schemas.py index 7849534fbcf..712ce3510ac 100644 --- a/services/director/tests/test_openapi_schemas.py +++ b/services/director/tests/test_openapi_schemas.py @@ -2,10 +2,8 @@ import pytest import yaml - from openapi_spec_validator import validate_spec from openapi_spec_validator.exceptions import OpenAPIValidationError - from simcore_service_director import resources API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) @@ -15,19 +13,17 @@ def correct_schema_local_references(schema_specs): for key, value in schema_specs.items(): if isinstance(value, dict): correct_schema_local_references(value) - elif "$ref" in key: - if str(value).startswith("#/"): - # correct the reference - new_value = str(value).replace("#/", "#/components/schemas/") - schema_specs[key] = new_value + elif "$ref" in key and str(value).startswith("#/"): + # correct the reference + new_value = 
str(value).replace("#/", "#/components/schemas/") + schema_specs[key] = new_value def add_namespace_for_converted_schemas(schema_specs): # schemas converted from jsonschema do not have an overarching namespace. # the openapi validator does not like this # we use the jsonschema title to create a fake namespace - fake_schema_specs = {"FakeName": schema_specs} - return fake_schema_specs + return {"FakeName": schema_specs} def validate_individual_schemas(list_of_paths): @@ -44,7 +40,7 @@ def validate_individual_schemas(list_of_paths): for spec_file_path in list_of_paths: assert spec_file_path.exists() # only consider schemas - if not "openapi.yaml" in str(spec_file_path.name) and "schemas" in str( + if "openapi.yaml" not in str(spec_file_path.name) and "schemas" in str( spec_file_path ): with spec_file_path.open() as file_ptr: @@ -62,9 +58,7 @@ def validate_individual_schemas(list_of_paths): @pytest.mark.parametrize("version", API_VERSIONS) def test_valid_individual_openapi_schemas_specs(version): - name = "{root}/{version}/schemas".format( - root=resources.RESOURCE_OPENAPI_ROOT, version=version - ) + name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" schemas_folder_path = resources.get_path(name) validate_individual_schemas(Path(schemas_folder_path).rglob("*.yaml")) validate_individual_schemas(Path(schemas_folder_path).rglob("*.yml")) diff --git a/services/director/tests/test_producer.py b/services/director/tests/test_producer.py index e8fcc4a6fdb..620e624a663 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/test_producer.py @@ -6,8 +6,8 @@ import json import uuid +from collections.abc import Callable from dataclasses import dataclass -from typing import Callable import docker import pytest @@ -158,7 +158,7 @@ async def test_find_service_tag(): ) assert latest_version == "2.11.0" # get a specific version - version = await producer._find_service_tag(list_of_images, my_service_key, "1.2.3") + await 
producer._find_service_tag(list_of_images, my_service_key, "1.2.3") async def test_start_stop_service(docker_network, run_services): diff --git a/services/director/tests/test_registry_cache_task.py b/services/director/tests/test_registry_cache_task.py index 056462b9199..e0272798204 100644 --- a/services/director/tests/test_registry_cache_task.py +++ b/services/director/tests/test_registry_cache_task.py @@ -22,7 +22,7 @@ def client( registry_cache_task.setup(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) + return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) async def test_registry_caching_task(client, push_services): diff --git a/services/director/tests/test_registry_proxy.py b/services/director/tests/test_registry_proxy.py index dec9ee43708..f59cb2c428f 100644 --- a/services/director/tests/test_registry_proxy.py +++ b/services/director/tests/test_registry_proxy.py @@ -5,7 +5,6 @@ import time import pytest - from simcore_service_director import config, registry_proxy @@ -133,10 +132,12 @@ async def test_list_interactive_service_dependencies( docker_dependencies = json.loads( docker_labels["simcore.service.dependencies"] ) - image_dependencies = await registry_proxy.list_interactive_service_dependencies( - aiohttp_mock_app, - service_description["key"], - service_description["version"], + image_dependencies = ( + await registry_proxy.list_interactive_service_dependencies( + aiohttp_mock_app, + service_description["key"], + service_description["version"], + ) ) assert isinstance(image_dependencies, list) assert len(image_dependencies) == len(docker_dependencies) @@ -180,6 +181,7 @@ async def test_get_image_labels( assert image_manifest_digest not in images_digests images_digests.add(image_manifest_digest) + def test_get_service_first_name(): repo = "simcore/services/dynamic/myservice/modeler/my-sub-modeler" assert registry_proxy.get_service_first_name(repo) == "myservice" @@ -273,11 +275,7 @@ async 
def test_get_services_performance( ) stop_time = time.perf_counter() print( - "\nTime to run getting services: {}s, #services {}, time per call {}s/service".format( - stop_time - start_time, - len(services), - (stop_time - start_time) / len(services), - ) + f"\nTime to run getting services: {stop_time - start_time}s, #services {len(services)}, time per call {(stop_time - start_time) / len(services)}s/service" ) diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 3141d2f2baa..8eeb7b410bd 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -6,7 +6,7 @@ @pytest.mark.parametrize( "timestr", - ( + [ # Samples taken from https://docs.docker.com/engine/reference/commandline/service_inspect/ "2020-10-09T18:44:02.558012087Z", "2020-10-09T12:28:14.771034099Z", @@ -14,7 +14,7 @@ # found cases with spaces "2020-10-09T12:28:14.77 Z", " 2020-10-09T12:28:14.77 ", - ), + ], ) def test_parse_valid_time_strings(timestr): From 83dd0d94d819ab7674b386fcb4d57b991df8e967 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:37:58 +0100 Subject: [PATCH 017/201] aync tests --- services/director/setup.cfg | 11 +++++++++++ services/director/tests/conftest.py | 4 ++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 1eb089c0af8..eb3d7554b27 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -6,3 +6,14 @@ tag = False commit_args = --no-verify [bumpversion:file:VERSION] + + +[tool:pytest] +asyncio_mode = auto +markers = + testit: "marks test to run during development" + + +[mypy] +plugins = + pydantic.mypy diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index f59bb2b5fe3..d0275e85a03 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -17,8 +17,8 @@ 
"pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", - "pytest_simcore.fakers_projects_data", - "pytest_simcore.fakers_users_data", + "pytest_simcore.faker_projects_data", + "pytest_simcore.faker_users_data", "pytest_simcore.repository_paths", ] From f4831a56783c6d841ed38e371a966b0115c7e9ef Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:53:16 +0100 Subject: [PATCH 018/201] test_docker_utils runs --- .../simcore_service_director/docker_utils.py | 19 ++++++++----------- services/director/tests/conftest.py | 5 +++-- .../director/tests/fixtures/fake_services.py | 11 ++++++++--- services/director/tests/test_docker_utils.py | 15 ++++++++------- 4 files changed, 27 insertions(+), 23 deletions(-) diff --git a/services/director/src/simcore_service_director/docker_utils.py b/services/director/src/simcore_service_director/docker_utils.py index 56dfba1bc3a..0c6b8a1a363 100644 --- a/services/director/src/simcore_service_director/docker_utils.py +++ b/services/director/src/simcore_service_director/docker_utils.py @@ -1,18 +1,19 @@ import logging +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager import aiodocker -from asyncio_extras import async_contextmanager -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -@async_contextmanager -async def docker_client() -> aiodocker.docker.Docker: +@asynccontextmanager +async def docker_client() -> AsyncIterator[aiodocker.docker.Docker]: try: client = aiodocker.Docker() yield client except aiodocker.exceptions.DockerError: - log.exception(msg="Unexpected error with docker client") + _logger.exception(msg="Unexpected error with docker client") raise finally: await client.close() @@ -27,14 +28,10 @@ async def swarm_get_number_nodes() -> int: async def swarm_has_manager_nodes() -> bool: async with docker_client() as client: # pylint: 
disable=not-async-context-manager nodes = await client.nodes.list(filters={"role": "manager"}) - if nodes: - return True - return False + return bool(nodes) async def swarm_has_worker_nodes() -> bool: async with docker_client() as client: # pylint: disable=not-async-context-manager nodes = await client.nodes.list(filters={"role": "worker"}) - if nodes: - return True - return False + return bool(nodes) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index d0275e85a03..6ef70c76e00 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -13,6 +13,7 @@ from simcore_service_director import config, resources pytest_plugins = [ + "fixtures.fake_services", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", @@ -70,7 +71,7 @@ def configure_swarm_stack_name(): @pytest.fixture -def configure_registry_access(docker_registry: str): +def configure_registry_access(docker_registry: str) -> None: config.REGISTRY_URL = docker_registry config.REGISTRY_PATH = docker_registry config.REGISTRY_SSL = False @@ -78,7 +79,7 @@ def configure_registry_access(docker_registry: str): @pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig: pytest.Config): +def configure_custom_registry(pytestconfig: pytest.Config) -> None: # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password config.REGISTRY_URL = pytestconfig.getoption("registry_url") diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index b709e422271..f2954c3c469 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -84,6 +84,7 @@ async def build_push_images( return list_of_pushed_images_tags yield build_push_images + _logger.info("clean registry") _clean_registry(registry_url, list_of_pushed_images_tags) 
_clean_registry(registry_url, dependent_images) @@ -185,7 +186,7 @@ def _clean_registry(registry_url, list_of_images): url = "http://{host}/v2/{name}/manifests/{tag}".format( host=registry_url, name=service_description["key"], tag=tag ) - response = requests.get(url, headers=request_headers) + response = requests.get(url, headers=request_headers, timeout=10) docker_content_digest = response.headers["Docker-Content-Digest"] # remove the image from the registry url = "http://{host}/v2/{name}/manifests/{digest}".format( @@ -193,7 +194,7 @@ def _clean_registry(registry_url, list_of_images): name=service_description["key"], digest=docker_content_digest, ) - response = requests.delete(url, headers=request_headers) + response = requests.delete(url, headers=request_headers, timeout=5) async def _create_base_image(labels, tag): @@ -223,7 +224,11 @@ def _create_service_description(service_type, name, tag): service_key_type = "comp" elif service_type == "dynamic": service_key_type = "dynamic" - service_desc["key"] = "simcore/services/" + service_key_type + "/" + name + else: + msg = f"Invalid {service_type=}" + raise ValueError(msg) + + service_desc["key"] = f"simcore/services/{service_key_type}/{name}" service_desc["version"] = tag service_desc["type"] = service_type diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/test_docker_utils.py index 76b4abd2051..3c9180f88cb 100644 --- a/services/director/tests/test_docker_utils.py +++ b/services/director/tests/test_docker_utils.py @@ -4,6 +4,7 @@ # pylint:disable=too-many-arguments # pylint: disable=not-async-context-manager from asyncio import sleep +from collections.abc import Callable import pytest from aiodocker.exceptions import DockerError @@ -37,30 +38,30 @@ async def test_docker_client(): (docker_utils.swarm_has_worker_nodes), ], ) -async def test_swarm_method_with_no_swarm(fct): +async def test_swarm_method_with_no_swarm(fct: Callable): # if this fails on your development machine run # 
`docker swarm leave --force` to leave the swarm with pytest.raises(DockerError): await fct() -async def test_swarm_get_number_nodes(docker_swarm): +async def test_swarm_get_number_nodes(docker_swarm: None): num_nodes = await docker_utils.swarm_get_number_nodes() assert num_nodes == 1 -async def test_swarm_has_manager_nodes(docker_swarm): +async def test_swarm_has_manager_nodes(docker_swarm: None): assert (await docker_utils.swarm_has_manager_nodes()) is True -async def test_swarm_has_worker_nodes(docker_swarm): +async def test_swarm_has_worker_nodes(docker_swarm: None): assert (await docker_utils.swarm_has_worker_nodes()) is False async def test_push_services( - push_services, - configure_registry_access, - configure_schemas_location, + push_services: Callable, + configure_registry_access: None, + configure_schemas_location: None, ): await push_services( number_of_computational_services=3, number_of_interactive_services=3 From e6fbfbaf994b801668d63799f3894bdd7066d488 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:56:59 +0100 Subject: [PATCH 019/201] test_settings --- services/director/tests/test_core_settings.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 services/director/tests/test_core_settings.py diff --git a/services/director/tests/test_core_settings.py b/services/director/tests/test_core_settings.py new file mode 100644 index 00000000000..24b07909702 --- /dev/null +++ b/services/director/tests/test_core_settings.py @@ -0,0 +1,24 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director.core.settings import ApplicationSettings + + +def test_valid_web_application_settings(app_environment: EnvVarsDict): + """ + We validate actual envfiles (e.g. 
repo.config files) by passing them via the CLI + + $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets + $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + + """ + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() + + assert app_environment["DIRECTOR_DEBUG"] == settings.DIRECTOR_DEBUG From b32c6cd53f96abe91f6cde1d9bafa27b206724da Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:58:42 +0100 Subject: [PATCH 020/201] test_utils --- services/director/tests/test_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 8eeb7b410bd..c9126ca0a4d 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -1,5 +1,6 @@ from datetime import datetime +import arrow import pytest from simcore_service_director.utils import parse_as_datetime @@ -26,7 +27,7 @@ def test_parse_valid_time_strings(timestr): def test_parse_invalid_timestr(): - now = datetime.utcnow() + now = arrow.utcnow().datetime invalid_timestr = "2020-10-09T12:28" # w/ default, it should NOT raise From 1c40e4b1f3f81f57a91ababf83cc5c6ba4c454c1 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:00:02 +0100 Subject: [PATCH 021/201] minor --- services/director/tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 6ef70c76e00..f6aefaf293c 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -47,7 +47,7 @@ def installed_package_dir() -> Path: @pytest.fixture(scope="session") -def common_schemas_specs_dir(osparc_simcore_root_dir: Path): +def 
common_schemas_specs_dir(osparc_simcore_root_dir: Path) -> Path: specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" assert specs_dir.exists() return specs_dir @@ -56,7 +56,7 @@ def common_schemas_specs_dir(osparc_simcore_root_dir: Path): @pytest.fixture def configure_schemas_location( installed_package_dir: Path, common_schemas_specs_dir: Path -): +) -> None: config.NODE_SCHEMA_LOCATION = str( common_schemas_specs_dir / "node-meta-v0.0.1.json" ) @@ -66,7 +66,7 @@ def configure_schemas_location( @pytest.fixture(scope="session") -def configure_swarm_stack_name(): +def configure_swarm_stack_name() -> None: config.SWARM_STACK_NAME = "test_stack" From ebb8ead76e24d04e02dd690e416ba819f049c60e Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:02:06 +0100 Subject: [PATCH 022/201] test_oas --- services/director/tests/conftest.py | 6 +++--- services/director/tests/test_oas.py | 17 ++++------------- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index f6aefaf293c..458422c658a 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -1,7 +1,7 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except # pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import os from pathlib import Path diff --git a/services/director/tests/test_oas.py b/services/director/tests/test_oas.py index c62080a2391..186f3819737 100644 --- a/services/director/tests/test_oas.py +++ b/services/director/tests/test_oas.py @@ -1,22 +1,13 @@ # pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + -import pytest import yaml -from openapi_spec_validator import 
validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError from simcore_service_director import resources -def test_openapi_specs(): - openapi_path = resources.get_path(resources.RESOURCE_OPEN_API) - with resources.stream(resources.RESOURCE_OPEN_API) as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=openapi_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) - - def test_server_specs(): with resources.stream(resources.RESOURCE_OPEN_API) as fh: specs = yaml.safe_load(fh) From 5ea1a2bff763e41a4ed1e653c96e452ca5abce72 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:02:52 +0100 Subject: [PATCH 023/201] oas is auto-generated now --- .../director/tests/test_dummy_services.py | 4 +- services/director/tests/test_openapi.py | 25 -------- .../director/tests/test_openapi_schemas.py | 64 ------------------- 3 files changed, 2 insertions(+), 91 deletions(-) delete mode 100644 services/director/tests/test_openapi.py delete mode 100644 services/director/tests/test_openapi_schemas.py diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py index 5427fd178c7..80e142c4601 100644 --- a/services/director/tests/test_dummy_services.py +++ b/services/director/tests/test_dummy_services.py @@ -1,7 +1,7 @@ +# pylint: disable=bare-except +# pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-import -# pylint: disable=bare-except -# pylint:disable=redefined-outer-name import json import logging diff --git a/services/director/tests/test_openapi.py b/services/director/tests/test_openapi.py deleted file mode 100644 index 36b25d16073..00000000000 --- a/services/director/tests/test_openapi.py +++ /dev/null @@ -1,25 +0,0 @@ -from pathlib import Path - -import pkg_resources -import pytest -import simcore_service_director -import yaml -from 
openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError -from simcore_service_director.resources import RESOURCE_OPEN_API - - -def test_specifications(): - # pylint: disable=no-value-for-parameter - spec_path = Path( - pkg_resources.resource_filename( - simcore_service_director.__name__, RESOURCE_OPEN_API - ) - ) - - with spec_path.open() as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=spec_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) diff --git a/services/director/tests/test_openapi_schemas.py b/services/director/tests/test_openapi_schemas.py deleted file mode 100644 index 712ce3510ac..00000000000 --- a/services/director/tests/test_openapi_schemas.py +++ /dev/null @@ -1,64 +0,0 @@ -from pathlib import Path - -import pytest -import yaml -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError -from simcore_service_director import resources - -API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - - -def correct_schema_local_references(schema_specs): - for key, value in schema_specs.items(): - if isinstance(value, dict): - correct_schema_local_references(value) - elif "$ref" in key and str(value).startswith("#/"): - # correct the reference - new_value = str(value).replace("#/", "#/components/schemas/") - schema_specs[key] = new_value - - -def add_namespace_for_converted_schemas(schema_specs): - # schemas converted from jsonschema do not have an overarching namespace. 
- # the openapi validator does not like this - # we use the jsonschema title to create a fake namespace - return {"FakeName": schema_specs} - - -def validate_individual_schemas(list_of_paths): - fake_openapi_headers = { - "openapi": "3.0.0", - "info": { - "title": "An include file to define sortable attributes", - "version": "1.0.0", - }, - "paths": {}, - "components": {"parameters": {}, "schemas": {}}, - } - - for spec_file_path in list_of_paths: - assert spec_file_path.exists() - # only consider schemas - if "openapi.yaml" not in str(spec_file_path.name) and "schemas" in str( - spec_file_path - ): - with spec_file_path.open() as file_ptr: - schema_specs = yaml.safe_load(file_ptr) - # correct local references - correct_schema_local_references(schema_specs) - if str(spec_file_path).endswith("-converted.yaml"): - schema_specs = add_namespace_for_converted_schemas(schema_specs) - fake_openapi_headers["components"]["schemas"] = schema_specs - try: - validate_spec(fake_openapi_headers, spec_url=spec_file_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) - - -@pytest.mark.parametrize("version", API_VERSIONS) -def test_valid_individual_openapi_schemas_specs(version): - name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" - schemas_folder_path = resources.get_path(name) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yaml")) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yml")) From e79bb34d0ceca5e250f0538459ab4f1d783afa25 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:15:30 +0100 Subject: [PATCH 024/201] moved config to settings/constants --- .../src/simcore_service_director/constants.py | 19 +++ .../simcore_service_director/core/settings.py | 113 +++++++++++++++++- 2 files changed, 131 insertions(+), 1 deletion(-) create mode 100644 services/director/src/simcore_service_director/constants.py diff --git 
a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py new file mode 100644 index 00000000000..8ee614e5f8e --- /dev/null +++ b/services/director/src/simcore_service_director/constants.py @@ -0,0 +1,19 @@ +from typing import Final + +SERVICE_RUNTIME_SETTINGS: Final[str] = "simcore.service.settings" +SERVICE_REVERSE_PROXY_SETTINGS: Final[str] = "simcore.service.reverse-proxy-settings" +SERVICE_RUNTIME_BOOTSETTINGS: Final[str] = "simcore.service.bootsettings" + +ORG_LABELS_TO_SCHEMA_LABELS: Final[dict[str, str]] = { + "org.label-schema.build-date": "build_date", + "org.label-schema.vcs-ref": "vcs_ref", + "org.label-schema.vcs-url": "vcs_url", +} + + +CPU_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_NANO_CPUS_LIMIT" +MEM_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_MEMORY_BYTES_LIMIT" + +APP_REGISTRY_CACHE_DATA_KEY: Final[str] = __name__ + "_registry_cache_data" + +API_ROOT: Final[str] = "api" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index e1bd33985be..4d288431c2c 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -1,3 +1,6 @@ +import datetime +import warnings + from models_library.basic_types import ( BootModeEnum, BuildTargetEnum, @@ -5,13 +8,16 @@ PortInt, VersionTag, ) -from pydantic import Field, PositiveInt +from pydantic import AnyUrl, ByteSize, Field, PositiveInt, parse_obj_as, validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings +from settings_library.docker_registry import RegistrySettings +from settings_library.postgres import PostgresSettings from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings from .._meta import API_VERSION, API_VTAG, APP_NAME +from 
..constants import API_ROOT class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): @@ -64,3 +70,108 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_TRACING: TracingSettings | None = Field( auto_default_from_env=True, description="settings for opentelemetry tracing" ) + + # migrated settings + DIRECTOR_DEFAULT_MAX_NANO_CPUS: int = Field( + default=1 * pow(10, 9), + env=["DIRECTOR_DEFAULT_MAX_NANO_CPUS", "DEFAULT_MAX_NANO_CPUS"], + ) + DIRECTOR_DEFAULT_MAX_MEMORY: int = Field( + default=parse_obj_as(ByteSize, "2GiB"), + env=["DIRECTOR_DEFAULT_MAX_MEMORY", "DEFAULT_MAX_MEMORY"], + ) + DIRECTOR_REGISTRY_CACHING: bool = Field( + default=True, description="cache the docker registry internally" + ) + DIRECTOR_REGISTRY_CACHING_TTL: datetime.timedelta = Field( + default=datetime.timedelta(minutes=15), + description="cache time to live value (defaults to 15 minutes)", + ) + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str = "" + + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ + str, str + ] = Field(default_factory=dict) + DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: str = "" + DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: str = "" + DIRECTOR_SELF_SIGNED_SSL_FILENAME: str = "" + + DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = 10 + DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = 12 + + DIRECTOR_TRAEFIK_SIMCORE_ZONE: str = Field( + default="internal_simcore_stack", + env=["DIRECTOR_TRAEFIK_SIMCORE_ZONE", "TRAEFIK_SIMCORE_ZONE"], + ) + + DIRECTOR_REGISTRY: RegistrySettings = Field( + auto_default_from_env=True, + description="settings for the private registry deployed with the platform", + ) + + DIRECTOR_EXTRA_HOSTS_SUFFIX: str = Field( + default="undefined", env=["DIRECTOR_EXTRA_HOSTS_SUFFIX", "EXTRA_HOSTS_SUFFIX"] + ) + + DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + STORAGE_ENDPOINT: AnyUrl = Field(...) 
+ + # TODO: this needs some code changes + # SERVICES_DEFAULT_ENVS: dict[str, str] = { + # "POSTGRES_ENDPOINT": os.environ.get( + # "POSTGRES_ENDPOINT", "undefined postgres endpoint" + # ), + # "POSTGRES_USER": os.environ.get("POSTGRES_USER", "undefined postgres user"), + # "POSTGRES_PASSWORD": os.environ.get( + # "POSTGRES_PASSWORD", "undefined postgres password" + # ), + # "POSTGRES_DB": os.environ.get("POSTGRES_DB", "undefined postgres db"), + # "STORAGE_ENDPOINT": os.environ.get( + # "STORAGE_ENDPOINT", "undefined storage endpoint" + # ), + # } + + DIRECTOR_PUBLISHED_HOST_NAME: str = Field( + default="", env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] + ) + + DIRECTOR_SWARM_STACK_NAME: str = Field( + default="undefined-please-check", + env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], + ) + + # used when in devel mode vs release mode + DIRECTOR_NODE_SCHEMA_LOCATION: str = Field( + default=f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json", + env=["DIRECTOR_NODE_SCHEMA_LOCATION", "NODE_SCHEMA_LOCATION"], + ) + # used to find the right network name + DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( + default=None, + env=["DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME", "SIMCORE_SERVICES_NETWORK_NAME"], + ) + # useful when developing with an alternative registry namespace + DIRECTOR_SIMCORE_SERVICES_PREFIX: str = Field( + default="simcore/services", + env=["DIRECTOR_SIMCORE_SERVICES_PREFIX", "SIMCORE_SERVICES_PREFIX"], + ) + + DIRECTOR_MONITORING_ENABLED: bool = Field( + default=False, env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] + ) + + @validator("DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @classmethod + def _validate_substitutions(cls, v): + if v: + warnings.warn( # noqa: B028 + "Generic resources will be replaced by the following " + f"placement constraints {v}. 
This is a workaround " + "for https://github.com/moby/swarmkit/pull/3162", + UserWarning, + ) + if len(v) != len(set(v.values())): + msg = f"Dictionary values must be unique, provided: {v}" + raise ValueError(msg) + + return v From 8a35424ffc4be74866cf0ee2560cdc09d0a9d20f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:49:06 +0100 Subject: [PATCH 025/201] migration --- .../cache_request_decorator.py | 14 +-- .../core/application.py | 3 + .../registry_cache_task.py | 57 +++++---- .../registry_proxy.py | 110 +++++++++--------- 4 files changed, 96 insertions(+), 88 deletions(-) diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py index 431a7216e90..67844911d3f 100644 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ b/services/director/src/simcore_service_director/cache_request_decorator.py @@ -1,26 +1,26 @@ from functools import wraps -from typing import Coroutine, Dict, Tuple +from typing import Coroutine -from aiohttp import web +from fastapi import FastAPI from simcore_service_director import config -def cache_requests(func: Coroutine, no_cache: bool = False): +def cache_requests(func: Coroutine, *, no_cache: bool = False): @wraps(func) async def wrapped( - app: web.Application, url: str, method: str, *args, **kwargs - ) -> Tuple[Dict, Dict]: + app: FastAPI, url: str, method: str, *args, **kwargs + ) -> tuple[dict, dict]: is_cache_enabled = config.DIRECTOR_REGISTRY_CACHING and method == "GET" cache_key = f"{url}:{method}" if is_cache_enabled and not no_cache: - cache_data = app[config.APP_REGISTRY_CACHE_DATA_KEY] + cache_data = app.state.registry_cache if cache_key in cache_data: return cache_data[cache_key] resp_data, resp_headers = await func(app, url, method, *args, **kwargs) if is_cache_enabled and not no_cache: - cache_data = 
app[config.APP_REGISTRY_CACHE_DATA_KEY] + cache_data = app.state.registry_cache cache_data[cache_key] = (resp_data, resp_headers) return (resp_data, resp_headers) diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index f75f0c35336..f02d0f3d10e 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -4,6 +4,7 @@ from fastapi import FastAPI from servicelib.fastapi.tracing import setup_tracing +from .. import registry_cache_task from .._meta import ( API_VERSION, API_VTAG, @@ -49,6 +50,8 @@ def create_app(settings: ApplicationSettings) -> FastAPI: if app.state.settings.DIRECTOR_TRACING: setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + registry_cache_task.setup(app) + # ERROR HANDLERS # EVENTS diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py index 10eca38b2b7..100152dfaad 100644 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ b/services/director/src/simcore_service_director/registry_cache_task.py @@ -1,22 +1,24 @@ import asyncio import logging -from typing import AsyncIterator -from aiohttp import web +from fastapi import FastAPI from servicelib.utils import logged_gather -from simcore_service_director import config, exceptions, registry_proxy -from simcore_service_director.config import APP_REGISTRY_CACHE_DATA_KEY + +from . 
import config, exceptions, registry_proxy +from .core.settings import ApplicationSettings _logger = logging.getLogger(__name__) TASK_NAME: str = __name__ + "_registry_caching_task" -async def registry_caching_task(app: web.Application) -> None: +async def registry_caching_task(app: FastAPI) -> None: try: _logger.info("%s: initializing cache...", TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() + assert hasattr(app.state, "registry_cache") # nosec + assert isinstance(app.state.registry_cache, dict) # nosec + app.state.registry_cache.clear() await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) _logger.info("%s: initialisation completed", TASK_NAME) while True: @@ -24,7 +26,7 @@ async def registry_caching_task(app: web.Application) -> None: try: keys = [] refresh_tasks = [] - for key in app[APP_REGISTRY_CACHE_DATA_KEY]: + for key in app.state.registry_cache: path, method = key.split(":") _logger.debug("refresh %s:%s", method, path) refresh_tasks.append( @@ -32,18 +34,18 @@ async def registry_caching_task(app: web.Application) -> None: app, path, method, no_cache=True ) ) - keys = list(app[APP_REGISTRY_CACHE_DATA_KEY].keys()) + keys = list(app.state.registry_cache.keys()) results = await logged_gather(*refresh_tasks) - for key, result in zip(keys, results): - app[APP_REGISTRY_CACHE_DATA_KEY][key] = result + for key, result in zip(keys, results, strict=False): + app.state.registry_cache[key] = result except exceptions.DirectorException: # if the registry is temporarily not available this might happen _logger.exception( "%s: exception while refreshing cache, clean cache...", TASK_NAME ) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() + app.state.registry_cache.clear() _logger.info( "cache refreshed %s: sleeping for %ss...", @@ -57,23 +59,30 @@ async def registry_caching_task(app: web.Application) -> None: _logger.exception("%s: Unhandled exception while refreshing cache", TASK_NAME) finally: _logger.info("%s: finished task...clearing cache...", 
TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() - + app.state.registry_cache.clear() -async def setup_registry_caching_task(app: web.Application) -> AsyncIterator[None]: - app[APP_REGISTRY_CACHE_DATA_KEY] = {} - app[TASK_NAME] = asyncio.get_event_loop().create_task(registry_caching_task(app)) - yield +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.registry_cache = {} + app.state.registry_cache_task = None + app_settings: ApplicationSettings = app.state.settings + if not app_settings.DIRECTOR_REGISTRY_CACHING: + _logger.info("Registry caching disabled") + return - task = app[TASK_NAME] - task.cancel() - await task + app.state.registry_cache = {} + app.state.registry_cache_task = asyncio.get_event_loop().create_task( + registry_caching_task(app) + ) + async def on_shutdown() -> None: + if app.state.registry_cache_task: + app.state.registry_cache_task.cancel() + await app.state.registry_cache_task -def setup(app: web.Application) -> None: - if config.DIRECTOR_REGISTRY_CACHING: - app.cleanup_ctx.append(setup_registry_caching_task) + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) -__all__ = ["setup", "APP_REGISTRY_CACHE_DATA_KEY"] +__all__ = ["setup"] diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 2c4591aefb6..d3e855a2ed7 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -6,10 +6,11 @@ import re from http import HTTPStatus from pprint import pformat -from typing import Any, AsyncIterator, Dict, List, Optional, Tuple +from typing import Any, AsyncIterator -from aiohttp import BasicAuth, ClientSession, client_exceptions, web +from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout +from fastapi import FastAPI from simcore_service_director 
import config, exceptions from simcore_service_director.cache_request_decorator import cache_requests from tenacity import retry @@ -39,8 +40,8 @@ class ServiceType(enum.Enum): async def _basic_auth_registry_request( - app: web.Application, path: str, method: str, **session_kwargs -) -> Tuple[Dict, Dict]: + app: FastAPI, path: str, method: str, **session_kwargs +) -> tuple[dict, dict]: if not config.REGISTRY_URL: raise exceptions.DirectorException("URL to registry is not defined") @@ -49,8 +50,8 @@ async def _basic_auth_registry_request( ) logger.debug("Requesting registry using %s", url) # try the registry with basic authentication first, spare 1 call - resp_data: Dict = {} - resp_headers: Dict = {} + resp_data: dict = {} + resp_headers: dict = {} auth = ( BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) if config.REGISTRY_AUTH and config.REGISTRY_USER and config.REGISTRY_PW @@ -87,21 +88,19 @@ async def _basic_auth_registry_request( return (resp_data, resp_headers) except client_exceptions.ClientError as exc: logger.exception("Unknown error while accessing registry: %s", str(exc)) - raise exceptions.DirectorException( - f"Unknown error while accessing registry: {str(exc)}" - ) + msg = f"Unknown error while accessing registry: {str(exc)}" + raise exceptions.DirectorException(msg) from exc async def _auth_registry_request( - url: URL, method: str, auth_headers: Dict, session: ClientSession, **kwargs -) -> Tuple[Dict, Dict]: + url: URL, method: str, auth_headers: dict, session: ClientSession, **kwargs +) -> tuple[dict, dict]: if not config.REGISTRY_AUTH or not config.REGISTRY_USER or not config.REGISTRY_PW: - raise exceptions.RegistryConnectionError( - "Wrong configuration: Authentication to registry is needed!" - ) + msg = "Wrong configuration: Authentication to registry is needed!" 
+ raise exceptions.RegistryConnectionError(msg) # auth issue let's try some authentication get the auth type auth_type = None - auth_details: Dict[str, str] = {} + auth_details: dict[str, str] = {} for key in auth_headers: if str(key).lower() == "www-authenticate": auth_type, auth_value = str(auth_headers[key]).split(" ", 1) @@ -111,9 +110,8 @@ async def _auth_registry_request( } break if not auth_type: - raise exceptions.RegistryConnectionError( - "Unknown registry type: cannot deduce authentication method!" - ) + msg = "Unknown registry type: cannot deduce authentication method!" + raise exceptions.RegistryConnectionError(msg) auth = BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) # bearer type, it needs a token with all communications @@ -124,13 +122,10 @@ async def _auth_registry_request( ) async with session.get(token_url, auth=auth, **kwargs) as token_resp: if not token_resp.status == HTTPStatus.OK: - raise exceptions.RegistryConnectionError( - "Unknown error while authentifying with registry: {}".format( - str(token_resp) - ) - ) + msg = f"Unknown error while authentifying with registry: {token_resp!s}" + raise exceptions.RegistryConnectionError(msg) bearer_code = (await token_resp.json())["token"] - headers = {"Authorization": "Bearer {}".format(bearer_code)} + headers = {"Authorization": f"Bearer {bearer_code}"} async with getattr(session, method.lower())( url, headers=headers, **kwargs ) as resp_wtoken: @@ -163,39 +158,38 @@ async def _auth_registry_request( resp_data = await resp_wbasic.json(content_type=None) resp_headers = resp_wbasic.headers return (resp_data, resp_headers) - raise exceptions.RegistryConnectionError( - f"Unknown registry authentification type: {url}" - ) + msg = f"Unknown registry authentification type: {url}" + raise exceptions.RegistryConnectionError(msg) async def registry_request( - app: web.Application, + app: FastAPI, path: str, method: str = "GET", no_cache: bool = False, **session_kwargs, -) -> Tuple[Dict, 
Dict]: +) -> tuple[dict, dict]: logger.debug( "Request to registry: path=%s, method=%s. no_cache=%s", path, method, no_cache ) - return await cache_requests(_basic_auth_registry_request, no_cache)( + return await cache_requests(_basic_auth_registry_request, no_cache=no_cache)( app, path, method, **session_kwargs ) -async def is_registry_responsive(app: web.Application) -> bool: +async def is_registry_responsive(app: FastAPI) -> bool: path = "/v2/" try: await registry_request( app, path, no_cache=True, timeout=ClientTimeout(total=1.0) ) return True - except (exceptions.DirectorException, asyncio.TimeoutError) as exc: + except (TimeoutError, exceptions.DirectorException) as exc: logger.debug("Registry not responsive: %s", exc) return False -async def setup_registry(app: web.Application) -> AsyncIterator[None]: +async def setup_registry(app: FastAPI) -> AsyncIterator[None]: logger.debug("pinging registry...") @retry( @@ -204,7 +198,7 @@ async def setup_registry(app: web.Application) -> AsyncIterator[None]: retry=retry_if_result(lambda result: result == False), reraise=True, ) - async def wait_until_registry_responsive(app: web.Application) -> bool: + async def wait_until_registry_responsive(app: FastAPI) -> bool: return await is_registry_responsive(app) await wait_until_registry_responsive(app) @@ -212,11 +206,11 @@ async def wait_until_registry_responsive(app: web.Application) -> bool: yield -async def _list_repositories(app: web.Application) -> List[str]: +async def _list_repositories(app: FastAPI) -> list[str]: logger.debug("listing repositories") # if there are more repos, the Link will be available in the response headers until none available path = f"/v2/_catalog?n={NUMBER_OF_RETRIEVED_REPOS}" - repos_list: List = [] + repos_list: list = [] while True: result, headers = await registry_request(app, path) if result["repositories"]: @@ -228,9 +222,9 @@ async def _list_repositories(app: web.Application) -> List[str]: return repos_list -async def list_image_tags(app: 
web.Application, image_key: str) -> List[str]: +async def list_image_tags(app: FastAPI, image_key: str) -> list[str]: logger.debug("listing image tags in %s", image_key) - image_tags: List = [] + image_tags: list = [] # get list of image tags path = f"/v2/{image_key}/tags/list?n={NUMBER_OF_RETRIEVED_TAGS}" while True: @@ -243,10 +237,12 @@ async def list_image_tags(app: web.Application, image_key: str) -> List[str]: logger.debug("Found %s image tags in %s", len(image_tags), image_key) return image_tags + _DOCKER_CONTENT_DIGEST_HEADER = "Docker-Content-Digest" -async def get_image_digest(app: web.Application, image: str, tag: str) -> Optional[str]: - """ Returns image manifest digest number or None if fails to obtain it + +async def get_image_digest(app: FastAPI, image: str, tag: str) -> str | None: + """Returns image manifest digest number or None if fails to obtain it The manifest digest is essentially a SHA256 hash of the image manifest @@ -259,8 +255,10 @@ async def get_image_digest(app: web.Application, image: str, tag: str) -> Option return headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) -async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[Dict, Optional[str]]: - """ Returns image labels and the image manifest digest """ +async def get_image_labels( + app: FastAPI, image: str, tag: str +) -> tuple[dict, str | None]: + """Returns image labels and the image manifest digest""" logger.debug("getting image labels of %s:%s", image, tag) path = f"/v2/{image}/manifests/{tag}" @@ -272,23 +270,21 @@ async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[ labels = container_config["Labels"] headers = headers or {} - manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) + manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) logger.debug("retrieved labels of image %s:%s", image, tag) return (labels, manifest_digest) -async def get_image_details( - app: web.Application, image_key: str, 
image_tag: str -) -> Dict: - image_details: Dict = {} +async def get_image_details(app: FastAPI, image_key: str, image_tag: str) -> dict: + image_details: dict = {} labels, image_manifest_digest = await get_image_labels(app, image_key, image_tag) if image_manifest_digest: # Adds manifest as extra key in the response similar to org.opencontainers.image.base.digest # SEE https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys - image_details.update({"image_digest":image_manifest_digest}) + image_details.update({"image_digest": image_manifest_digest}) if not labels: return image_details @@ -297,7 +293,7 @@ async def get_image_details( continue try: label_data = json.loads(labels[key]) - for label_key in label_data.keys(): + for label_key in label_data: image_details[label_key] = label_data[label_key] except json.decoder.JSONDecodeError: logging.exception( @@ -311,7 +307,7 @@ async def get_image_details( return image_details -async def get_repo_details(app: web.Application, image_key: str) -> List[Dict]: +async def get_repo_details(app: FastAPI, image_key: str) -> list[dict]: repo_details = [] image_tags = await list_image_tags(app, image_key) tasks = [get_image_details(app, image_key, tag) for tag in image_tags] @@ -322,7 +318,7 @@ async def get_repo_details(app: web.Application, image_key: str) -> List[Dict]: return repo_details -async def list_services(app: web.Application, service_type: ServiceType) -> List[Dict]: +async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: logger.debug("getting list of services") repos = await _list_repositories(app) # get the services repos @@ -347,8 +343,8 @@ async def list_services(app: web.Application, service_type: ServiceType) -> List async def list_interactive_service_dependencies( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> list[dict]: image_labels, _ = await 
get_image_labels(app, service_key, service_tag) dependency_keys = [] if DEPENDENCIES_LABEL_KEY in image_labels: @@ -368,7 +364,7 @@ async def list_interactive_service_dependencies( def _get_prefix(service_type: ServiceType) -> str: - return "{}/{}/".format(config.SIMCORE_SERVICES_PREFIX, service_type.value) + return f"{config.SIMCORE_SERVICES_PREFIX}/{service_type.value}/" def get_service_first_name(image_key: str) -> str: @@ -407,7 +403,7 @@ def get_service_last_names(image_key: str) -> str: RESOURCES_ENTRY_NAME = "Resources".lower() -def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): +def _validate_kind(entry_to_validate: dict[str, Any], kind_name: str): for element in ( entry_to_validate.get("value", {}) .get("Reservations", {}) @@ -419,8 +415,8 @@ def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): async def get_service_extras( - app: web.Application, image_key: str, image_tag: str -) -> Dict[str, Any]: + app: FastAPI, image_key: str, image_tag: str +) -> dict[str, Any]: # check physical node requirements # all nodes require "CPU" result = { From e5769c5239da95bd5010b35d0166182955b049a0 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:55:12 +0100 Subject: [PATCH 026/201] migrated to fastapi --- .../src/simcore_service_director/producer.py | 220 ++++++++---------- 1 file changed, 103 insertions(+), 117 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index b74da40c913..2414b3d13ee 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -2,15 +2,15 @@ import json import logging import re -from datetime import datetime, timedelta +from datetime import timedelta from distutils.version import StrictVersion from enum import Enum from http import HTTPStatus from pprint import pformat -from 
typing import Dict, List, Optional, Set, Tuple import aiodocker import aiohttp +import arrow import tenacity from aiohttp import ( ClientConnectionError, @@ -18,8 +18,8 @@ ClientResponse, ClientResponseError, ClientSession, - web, ) +from fastapi import FastAPI from servicelib.async_utils import run_sequentially_in_context from servicelib.monitor_services import service_started, service_stopped from tenacity import retry @@ -36,7 +36,6 @@ from .exceptions import ServiceStateSaveError from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw -from .utils import parse_as_datetime log = logging.getLogger(__name__) @@ -50,7 +49,7 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> Dict[str, str]: +async def _create_auth() -> dict[str, str]: return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} @@ -68,20 +67,20 @@ async def _check_node_uuid_available( ) except aiodocker.exceptions.DockerError as err: log.exception("Error while retrieving services list") - raise exceptions.GenericDockerError( - "Error while retrieving services", err - ) from err + msg = "Error while retrieving services" + raise exceptions.GenericDockerError(msg, err) from err if list_of_running_services_w_uuid: raise exceptions.ServiceUUIDInUseError(node_uuid) log.debug("UUID %s is free", node_uuid) -def _check_setting_correctness(setting: Dict) -> None: +def _check_setting_correctness(setting: dict) -> None: if "name" not in setting or "type" not in setting or "value" not in setting: - raise exceptions.DirectorException("Invalid setting in %s" % setting) + msg = f"Invalid setting in {setting}" + raise exceptions.DirectorException(msg) -def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: +def _parse_mount_settings(settings: list[dict]) -> list[dict]: mounts = [] for s in settings: log.debug("Retrieved mount settings %s", s) @@ -105,7 +104,7 @@ def _parse_mount_settings(settings: List[Dict]) -> 
List[Dict]: return mounts -def _parse_env_settings(settings: List[str]) -> Dict: +def _parse_env_settings(settings: list[str]) -> dict: envs = {} for s in settings: log.debug("Retrieved env settings %s", s) @@ -120,8 +119,8 @@ def _parse_env_settings(settings: List[str]) -> Dict: async def _read_service_settings( - app: web.Application, key: str, tag: str, settings_name: str -) -> Dict: + app: FastAPI, key: str, tag: str, settings_name: str +) -> dict: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) settings = ( json.loads(image_labels[settings_name]) if settings_name in image_labels else {} @@ -140,7 +139,7 @@ def _to_simcore_runtime_docker_label_key(key: str) -> str: # pylint: disable=too-many-branches async def _create_docker_service_params( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, service_key: str, service_tag: str, @@ -149,9 +148,9 @@ async def _create_docker_service_params( node_uuid: str, project_id: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, request_simcore_user_agent: str, -) -> Dict: +) -> dict: # pylint: disable=too-many-statements service_parameters_labels = await _read_service_settings( app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS @@ -222,9 +221,11 @@ async def _create_docker_service_params( "task_template": { "ContainerSpec": container_spec, "Placement": { - "Constraints": ["node.role==worker"] - if await docker_utils.swarm_has_worker_nodes() - else [] + "Constraints": ( + ["node.role==worker"] + if await docker_utils.swarm_has_worker_nodes() + else [] + ) }, "RestartPolicy": { "Condition": "on-failure", @@ -258,9 +259,9 @@ async def _create_docker_service_params( ): "osparc", # fixed no legacy available in other products _to_simcore_runtime_docker_label_key("cpu_limit"): "0", _to_simcore_runtime_docker_label_key("memory_limit"): "0", - _to_simcore_runtime_docker_label_key("type"): "main" - if main_service - else 
"dependency", + _to_simcore_runtime_docker_label_key("type"): ( + "main" if main_service else "dependency" + ), "io.simcore.zone": f"{config.TRAEFIK_SIMCORE_ZONE}", "traefik.enable": "true" if main_service else "false", f"traefik.http.services.{service_name}.loadbalancer.server.port": "8080", @@ -281,10 +282,7 @@ async def _create_docker_service_params( if reverse_proxy_settings: # some services define strip_path:true if they need the path to be stripped away - if ( - "strip_path" in reverse_proxy_settings - and reverse_proxy_settings["strip_path"] - ): + if reverse_proxy_settings.get("strip_path"): docker_params["labels"][ f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" ] = f"^/x/{node_uuid}" @@ -292,8 +290,8 @@ async def _create_docker_service_params( f"traefik.http.routers.{service_name}.middlewares" ] += f", {service_name}_stripprefixregex" - placement_constraints_to_substitute: List[str] = [] - placement_substitutions: Dict[ + placement_constraints_to_substitute: list[str] = [] + placement_substitutions: dict[ str, str ] = config.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS @@ -336,7 +334,7 @@ async def _create_docker_service_params( "GenericResources" ] - to_remove: Set[str] = set() + to_remove: set[str] = set() for generic_resource in generic_resources: kind = generic_resource["DiscreteResourceSpec"]["Kind"] if kind in placement_substitutions: @@ -389,9 +387,9 @@ async def _create_docker_service_params( ) # placement constraints - elif param["name"] == "constraints": # python-API compatible - docker_params["task_template"]["Placement"]["Constraints"] += param["value"] - elif param["type"] == "Constraints": # REST-API compatible + elif ( + param["name"] == "constraints" or param["type"] == "Constraints" + ): # python-API compatible docker_params["task_template"]["Placement"]["Constraints"] += param["value"] elif param["name"] == "env": log.debug("Found env parameter %s", param["value"]) @@ -402,7 +400,7 @@ 
async def _create_docker_service_params( ) elif param["name"] == "mount": log.debug("Found mount parameter %s", param["value"]) - mount_settings: List[Dict] = _parse_mount_settings(param["value"]) + mount_settings: list[dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( mount_settings @@ -456,7 +454,7 @@ async def _create_docker_service_params( return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: +def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: log.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) @@ -466,10 +464,10 @@ def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: +async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: network_name = "_default" if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = "{}".format(config.SIMCORE_SERVICES_NETWORK_NAME) + network_name = f"{config.SIMCORE_SERVICES_NETWORK_NAME}" # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -488,8 +486,8 @@ async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: async def _get_docker_image_port_mapping( - service: Dict, -) -> Tuple[Optional[str], Optional[int]]: + service: dict, +) -> tuple[str | None, int | None]: log.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] @@ -509,12 +507,11 @@ async def _get_docker_image_port_mapping( published_port = published_ports[0] if target_ports: target_port = target_ports[0] - else: - # if empty no port is published but there might still be an internal port defined - if _to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: - target_port = int( - service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] - 
) + # if empty no port is published but there might still be an internal port defined + elif _to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: + target_port = int( + service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] + ) return published_port, target_port @@ -525,7 +522,7 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: Dict, + service_boot_parameters_labels: dict, session: ClientSession, ) -> None: for param in service_boot_parameters_labels: @@ -578,9 +575,8 @@ async def _create_overlay_network_in_swarm( return docker_network.id except aiodocker.exceptions.DockerError as err: log.exception("Error while creating network for service %s", service_name) - raise exceptions.GenericDockerError( - "Error while creating network", err - ) from err + msg = "Error while creating network" + raise exceptions.GenericDockerError(msg, err) from err async def _remove_overlay_network_of_swarm( @@ -607,22 +603,21 @@ async def _remove_overlay_network_of_swarm( log.exception( "Error while removing networks for service with uuid: %s", node_uuid ) - raise exceptions.GenericDockerError( - "Error while removing networks", err - ) from err + msg = "Error while removing networks" + raise exceptions.GenericDockerError(msg, err) from err async def _get_service_state( - client: aiodocker.docker.Docker, service: Dict -) -> Tuple[ServiceState, str]: + client: aiodocker.docker.Docker, service: dict +) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] log.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) # wait for tasks - task_started_time = datetime.utcnow() - while (datetime.utcnow() - task_started_time) < timedelta(seconds=20): + task_started_time = arrow.utcnow().datetime + while (arrow.utcnow().datetime - 
task_started_time) < timedelta(seconds=20): tasks = await client.tasks.list(filters={"service": service_name}) # only keep the ones with the right service ID (we're being a bit picky maybe) tasks = [x for x in tasks if x["ServiceID"] == service["ID"]] @@ -666,11 +661,10 @@ async def _get_service_state( elif task_state in ("ready", "starting"): last_task_state = ServiceState.STARTING elif task_state in ("running"): - now = datetime.utcnow() + now = arrow.utcnow().datetime # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' - task_state_update_time = parse_as_datetime( - last_task["Status"]["Timestamp"], default=now - ) + last_task["Status"]["Timestamp"] + task_state_update_time = arrow.get(last_task["Status"]["Timestamp"]).datetime time_since_running = now - task_state_update_time log.debug("Now is %s, time since running mode is %s", now, time_since_running) @@ -688,7 +682,7 @@ async def _get_service_state( async def _wait_until_service_running_or_failed( - client: aiodocker.docker.Docker, service: Dict, node_uuid: str + client: aiodocker.docker.Docker, service: dict, node_uuid: str ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] @@ -714,9 +708,7 @@ async def _wait_until_service_running_or_failed( log.debug("Waited for service %s to start", service_name) -async def _get_repos_from_key( - app: web.Application, service_key: str -) -> Dict[str, List[Dict]]: +async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[dict]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) @@ -735,15 +727,14 @@ async def _get_repos_from_key( async def _get_dependant_repos( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> list[dict]: list_of_images = await 
_get_repos_from_key(app, service_key) tag = await _find_service_tag(list_of_images, service_key, service_tag) # look for dependencies - dependent_repositories = await registry_proxy.list_interactive_service_dependencies( + return await registry_proxy.list_interactive_service_dependencies( app, service_key, tag ) - return dependent_repositories _TAG_REGEX = re.compile(r"^\d+\.\d+\.\d+$") @@ -758,9 +749,9 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: Dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str ) -> str: - if not service_key in list_of_images: + if service_key not in list_of_images: raise exceptions.ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) @@ -785,7 +776,7 @@ async def _find_service_tag( async def _start_docker_service( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, user_id: str, project_id: str, @@ -794,9 +785,9 @@ async def _start_docker_service( main_service: bool, node_uuid: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, request_simcore_user_agent: str, -) -> Dict: # pylint: disable=R0913 +) -> dict: # pylint: disable=R0913 service_parameters = await _create_docker_service_params( app, client, @@ -821,9 +812,8 @@ async def _start_docker_service( service = await client.services.create(**service_parameters) if "ID" not in service: # error while starting service - raise exceptions.DirectorException( - "Error while starting service: {}".format(str(service)) - ) + msg = f"Error while starting service: {service!s}" + raise exceptions.DirectorException(msg) log.debug("Service started now waiting for it to run") # get the full info from docker @@ -848,7 +838,7 @@ async def _start_docker_service( service_name, published_port, service_boot_parameters_labels, session ) - container_meta_data = { + return { "published_port": published_port, "entry_point": 
service_entrypoint, "service_uuid": node_uuid, @@ -862,7 +852,6 @@ async def _start_docker_service( "user_id": user_id, "project_id": project_id, } - return container_meta_data except exceptions.ServiceStartTimeoutError: log.exception("Service failed to start") @@ -874,7 +863,7 @@ async def _start_docker_service( raise exceptions.ServiceNotAvailableError(service_key, service_tag) from err -async def _silent_service_cleanup(app: web.Application, node_uuid: str) -> None: +async def _silent_service_cleanup(app: FastAPI, node_uuid: str) -> None: try: await stop_service(app, node_uuid, False) except exceptions.DirectorException: @@ -882,15 +871,15 @@ async def _silent_service_cleanup(app: web.Application, node_uuid: str) -> None: async def _create_node( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, user_id: str, project_id: str, - list_of_services: List[Dict], + list_of_services: list[dict], node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> List[Dict]: # pylint: disable=R0913, R0915 +) -> list[dict]: # pylint: disable=R0913, R0915 log.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), @@ -930,8 +919,8 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: Dict, -) -> Tuple[str, str]: + service: dict, +) -> tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) if not service_full_name.startswith(config.REGISTRY_PATH): raise exceptions.DirectorException( @@ -949,14 +938,14 @@ async def _get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: Dict) -> str: +async def _get_service_basepath_from_docker_service(service: dict) -> str: envs_list = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict = dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] async def 
start_service( - app: web.Application, + app: FastAPI, user_id: str, project_id: str, service_key: str, @@ -964,7 +953,7 @@ async def start_service( node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> Dict: +) -> dict: # pylint: disable=C0103 log.debug( "starting service %s:%s using uuid %s, basepath %s", @@ -1010,8 +999,8 @@ async def start_service( async def _get_node_details( - app: web.Application, client: aiodocker.docker.Docker, service: Dict -) -> Dict: + app: FastAPI, client: aiodocker.docker.Docker, service: dict +) -> dict: service_key, service_tag = await _get_service_key_version_from_docker_service( service ) @@ -1040,7 +1029,7 @@ async def _get_node_details( # get the published port published_port, target_port = await _get_docker_image_port_mapping(service) - node_details = { + return { "published_port": published_port, "entry_point": service_entrypoint, "service_uuid": service_uuid, @@ -1054,12 +1043,11 @@ async def _get_node_details( "user_id": user_id, "project_id": project_id, } - return node_details async def get_services_details( - app: web.Application, user_id: Optional[str], study_id: Optional[str] -) -> List[Dict]: + app: FastAPI, user_id: str | None, study_id: str | None +) -> list[dict]: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = [ @@ -1078,23 +1066,21 @@ async def get_services_details( filters={"label": filters} ) - services_details = [ + return [ await _get_node_details(app, client, service) for service in list_running_services ] - return services_details except aiodocker.exceptions.DockerError as err: log.exception( "Error while listing services with user_id, study_id %s, %s", user_id, study_id, ) - raise exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + msg = "Error while accessing container" + raise exceptions.GenericDockerError(msg, err) from err -async def get_service_details(app: web.Application, node_uuid: 
str) -> Dict: +async def get_service_details(app: FastAPI, node_uuid: str) -> dict: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: list_running_services_with_uuid = await client.services.list( @@ -1116,15 +1102,13 @@ async def get_service_details(app: web.Application, node_uuid: str) -> Dict: msg="More than one docker service is labeled as main service" ) - node_details = await _get_node_details( + return await _get_node_details( app, client, list_running_services_with_uuid[0] ) - return node_details except aiodocker.exceptions.DockerError as err: log.exception("Error while accessing container with uuid: %s", node_uuid) - raise exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + msg = "Error while accessing container" + raise exceptions.GenericDockerError(msg, err) from err @retry( @@ -1171,7 +1155,7 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes @run_sequentially_in_context(target_args=["node_uuid"]) -async def stop_service(app: web.Application, node_uuid: str, save_state: bool) -> None: +async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: log.debug( "stopping service with node_uuid=%s, save_state=%s", node_uuid, save_state ) @@ -1189,9 +1173,8 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - ) except aiodocker.exceptions.DockerError as err: log.exception("Error while stopping container with uuid: %s", node_uuid) - raise exceptions.GenericDockerError( - "Error while stopping container", err - ) from err + msg = "Error while stopping container" + raise exceptions.GenericDockerError(msg, err) from err # error if no service with such an id exists if not list_running_services_with_uuid: @@ -1204,12 +1187,16 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - # FIXME: the exception for the 3d-viewer shall be removed once the dy-sidecar comes in 
service_host_name = "{}:{}{}".format( service_details["service_host"], - service_details["service_port"] - if service_details["service_port"] - else "80", - service_details["service_basepath"] - if not "3d-viewer" in service_details["service_host"] - else "", + ( + service_details["service_port"] + if service_details["service_port"] + else "80" + ), + ( + service_details["service_basepath"] + if "3d-viewer" not in service_details["service_host"] + else "" + ), ) # If state save is enforced @@ -1243,9 +1230,8 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - await client.services.delete(service["Spec"]["Name"]) except aiodocker.exceptions.DockerError as err: - raise exceptions.GenericDockerError( - "Error while removing services", err - ) from err + msg = "Error while removing services" + raise exceptions.GenericDockerError(msg, err) from err # remove network(s) log.debug("removed services, now removing network...") From b54c7857d7e9bab61baa94df4da947ddbdcd0ced Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:08:20 +0100 Subject: [PATCH 027/201] app fixture --- services/director/requirements/_test.in | 1 + services/director/requirements/_test.txt | 3 +++ services/director/tests/conftest.py | 22 ++++++++++++++++++++++ 3 files changed, 26 insertions(+) diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index eafeb199342..004f8396e81 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -9,6 +9,7 @@ --constraint _base.txt # testing +asgi_lifespan docker faker jsonref diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 656c294334f..1ccd0e2a907 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -2,6 +2,8 @@ anyio==4.6.2.post1 # via # -c requirements/_base.txt # httpx 
+asgi-lifespan==2.1.0 + # via -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt @@ -95,6 +97,7 @@ sniffio==1.3.1 # via # -c requirements/_base.txt # anyio + # asgi-lifespan # httpx termcolor==2.5.0 # via pytest-sugar diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 458422c658a..925c6c26d03 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -5,12 +5,17 @@ import os from pathlib import Path +from typing import AsyncIterator import pytest import simcore_service_director +from asgi_lifespan import LifespanManager +from fastapi import FastAPI from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director import config, resources +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings pytest_plugins = [ "fixtures.fake_services", @@ -109,6 +114,23 @@ def app_environment( ) +MAX_TIME_FOR_APP_TO_STARTUP = 10 +MAX_TIME_FOR_APP_TO_SHUTDOWN = 10 + + +@pytest.fixture +async def app( + app_environment: EnvVarsDict, is_pdb_enabled: bool +) -> AsyncIterator[FastAPI]: + the_test_app = create_app(settings=ApplicationSettings.create_from_envs()) + async with LifespanManager( + the_test_app, + startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, + shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, + ): + yield the_test_app + + # @pytest.fixture # async def aiohttp_mock_app(loop, mocker): # print("client session started ...") From a1afb3381fd570dc651435baf4728e82bb858709 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:16:47 +0100 Subject: [PATCH 028/201] refactoring test_handlers --- services/director/tests/api/conftest.py | 34 ++ .../test_rest_running_interactive_services.py | 285 
+++++++++++++ .../director/tests/api/test_rest_services.py | 256 +++++++++++ .../director/tests/fixtures/fake_services.py | 342 +++++++++------ services/director/tests/test_handlers.py | 400 ++++++++++-------- 5 files changed, 1011 insertions(+), 306 deletions(-) create mode 100644 services/director/tests/api/conftest.py create mode 100644 services/director/tests/api/test_rest_running_interactive_services.py create mode 100644 services/director/tests/api/test_rest_services.py diff --git a/services/director/tests/api/conftest.py b/services/director/tests/api/conftest.py new file mode 100644 index 00000000000..8cc186e8465 --- /dev/null +++ b/services/director/tests/api/conftest.py @@ -0,0 +1,34 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from collections.abc import AsyncIterator + +import httpx +import pytest +from fastapi import FastAPI +from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict +from httpx._transports.asgi import ASGITransport + + +@pytest.fixture +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + app=app, + base_url="http://director.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance(client._transport, ASGITransport) + yield client + + +@pytest.fixture +async def created_services( + push_services: PushServicesCallable, +) -> list[ServiceInRegistryInfoDict]: + return await push_services( + number_of_computational_services=3, number_of_interactive_services=2 + ) diff --git a/services/director/tests/api/test_rest_running_interactive_services.py b/services/director/tests/api/test_rest_running_interactive_services.py new file mode 100644 index 00000000000..98c13317871 --- /dev/null +++ 
b/services/director/tests/api/test_rest_running_interactive_services.py @@ -0,0 +1,285 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import uuid + +import httpx +import pytest +from aioresponses.core import CallbackResult, aioresponses +from fastapi import status + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +@pytest.mark.skip( + reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" +) +async def test_running_services_post_and_delete_no_swarm( + configure_swarm_stack_name, + client: httpx.AsyncClient, + push_services, + user_id, + project_id, + api_version_prefix, +): + params = { + "user_id": "None", + "project_id": "None", + "service_uuid": "sdlfkj4", + "service_key": "simcore/services/comp/some-key", + } + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == 500, data + + +@pytest.mark.parametrize( + "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] +) +async def test_running_services_post_and_delete( + configure_swarm_stack_name, + client: httpx.AsyncClient, + push_services, + docker_swarm, + user_id, + project_id, + api_version_prefix, + save_state: bool | None, + expected_save_state_call: bool, + mocker, +): + params = {} + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + params = { + "user_id": "None", + "project_id": "None", + "service_uuid": "sdlfkj4", + "service_key": "None", + "service_tag": "None", # optional + "service_basepath": 
"None", # optional + } + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == status.HTTP_400_BAD_REQUEST, data + + params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" + params["service_tag"] = "1.2.3" + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == status.HTTP_404_NOT_FOUND, data + + created_services = await push_services(0, 2) + assert len(created_services) == 2 + for created_service in created_services: + service_description = created_service["service_description"] + params["user_id"] = user_id + params["project_id"] = project_id + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + service_port = created_service["internal_port"] + service_entry_point = created_service["entry_point"] + params["service_basepath"] = "/i/am/a/basepath" + params["service_uuid"] = str(uuid.uuid4()) + # start the service + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + assert resp.status_code == status.HTTP_201_CREATED + assert resp.encoding == "application/json" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + "service_host", + "service_port", + "service_basepath", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert running_service_enveloped["data"]["service_port"] == service_port + service_published_port = 
running_service_enveloped["data"]["published_port"] + assert not service_published_port + assert service_entry_point == running_service_enveloped["data"]["entry_point"] + service_host = running_service_enveloped["data"]["service_host"] + assert service_host == f"test_{params['service_uuid']}" + service_basepath = running_service_enveloped["data"]["service_basepath"] + assert service_basepath == params["service_basepath"] + + # get the service + resp = await client.request( + "GET", + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + ) + assert resp.status_code == status.HTTP_200_OK + text = resp.text + assert resp.encoding == "application/json", f"Got {text=}" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert ( + running_service_enveloped["data"]["published_port"] + == service_published_port + ) + assert running_service_enveloped["data"]["entry_point"] == service_entry_point + assert running_service_enveloped["data"]["service_host"] == service_host + assert running_service_enveloped["data"]["service_port"] == service_port + assert running_service_enveloped["data"]["service_basepath"] == service_basepath + + # stop the service + query_params = {} + if save_state: + query_params.update({"save_state": "true" if save_state else "false"}) + + mocked_save_state_cb = mocker.MagicMock( + return_value=CallbackResult(status=200, payload={}) + ) + PASSTHROUGH_REQUESTS_PREFIXES = [ + "http://127.0.0.1", + "http://localhost", + "unix://", # docker 
engine + "ws://", # websockets + ] + with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + + # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- + mock.post( + f"http://{service_host}:{service_port}{service_basepath}/state", + status=200, + callback=mocked_save_state_cb, + ) + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + params=query_params, + ) + if expected_save_state_call: + mocked_save_state_cb.assert_called_once() + + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.encoding == "application/json" + data = resp.json() + assert data is None + + +async def test_running_interactive_services_list_get( + client: httpx.AsyncClient, push_services, docker_swarm +): + """Test case for running_interactive_services_list_get + + Returns a list of interactive services + """ + user_ids = ["first_user_id", "second_user_id"] + project_ids = ["first_project_id", "second_project_id", "third_project_id"] + # prepare services + NUM_SERVICES = 1 + created_services = await push_services(0, NUM_SERVICES) + assert len(created_services) == NUM_SERVICES + # start the services + for user_id in user_ids: + for project_id in project_ids: + for created_service in created_services: + service_description = created_service["service_description"] + params = {} + params["user_id"] = user_id + params["project_id"] = project_id + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + params["service_uuid"] = str(uuid.uuid4()) + # start the service + resp = await client.post( + "/v0/running_interactive_services", params=params + ) + assert resp.status_code == 201 + # get the list of services + for user_id in user_ids: + for project_id in project_ids: + params = {} + # list by user_id + params["user_id"] = user_id + response = await client.get( + 
"/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == len(project_ids) * NUM_SERVICES + # list by user_id and project_id + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == NUM_SERVICES + # list by project_id + params = {} + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == len(user_ids) * NUM_SERVICES diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py new file mode 100644 index 00000000000..6d5941a6044 --- /dev/null +++ b/services/director/tests/api/test_rest_services.py @@ -0,0 +1,256 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import json +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict +from helpers import json_schema_validator +from simcore_service_director import resources + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + 
assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") + + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + + data, error = _assert_response_and_unwrap_envelope(resp) + assert data + assert not error + + assert data["name"] == "simcore-service-director" + assert data["status"] == "SERVICE_RUNNING" + assert data["version"] == "0.1.0" + assert data["api_version"] == "0.1.0" + + +def _assert_services( + *, + expected: list[ServiceInRegistryInfoDict], + got: list[dict], + schema_version="v1", +): + assert len(expected) == len(got) + + expected_key_version_tuples = [ + (s["service_description"]["key"], s["service_description"]["version"]) + for s in expected + ] + + json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) + assert json_schema_path.exists() is True + with json_schema_path.open() as file_pt: + service_schema = json.load(file_pt) + + for service in got: + service.pop("image_digest", None) + if schema_version == "v1": + assert ( + expected_key_version_tuples.count((service["key"], service["version"])) + == 1 + ) + json_schema_validator.validate_instance_object(service, service_schema) + + +async def test_list_services_with_empty_registry( + docker_registry: str, + client: httpx.AsyncClient, + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ + # empty case + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=[], got=services) + + +async def test_list_services( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=created_services, got=services) + + +async def test_get_service_bad_request( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + assert len(created_services) > 0 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not services + assert error + + +async def test_list_services_by_service_type( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ assert len(created_services) == 5 + + resp = await client.get( + f"/{api_version_prefix}/services?service_type=computational" + ) + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services + assert len(services) == 3 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services + assert len(services) == 2 + + +async def test_get_services_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + resp = await client.get( + f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + resp = await client.get( + f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + retrieved_services: list[dict] = [] + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! 
+ key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + assert len(services) == 1 + + retrieved_services.append(services[0]) + + _assert_services(expected=created_services, got=retrieved_services) + + +async def test_get_service_labels( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for service in created_services: + service_description = service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}/labels" + resp = await client.get(url) + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + labels, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + + assert service["docker_labels"] == labels + + +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( + f"/{api_version_prefix}/service_extras/whatever/someversion" + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + 
created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/service_extras/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + + service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert created_service["service_extras"] == service_extras diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index f2954c3c469..76785c039d0 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -1,13 +1,18 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import asyncio import json import logging import random +import sys +from collections.abc import Awaitable, Iterator from io import BytesIO from pathlib import Path +from typing import Any, Literal, Protocol, TypedDict import pytest import requests @@ -19,105 +24,132 @@ _logger = logging.getLogger(__name__) -@pytest.fixture() -def push_services(docker_registry, tmpdir): - registry_url = docker_registry - tmp_dir = Path(tmpdir) +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent - list_of_pushed_images_tags = [] - dependent_images = [] - async def build_push_images( - number_of_computational_services, - number_of_interactive_services, - inter_dependent_services=False, - bad_json_format=False, - version="1.0.", - ): - try: - 
dependent_image = None - if inter_dependent_services: - dependent_image = await _build_push_image( - tmp_dir, - registry_url, - "computational", - "dependency", - "10.52.999999", - None, - bad_json_format=bad_json_format, - ) - dependent_images.append(dependent_image) +class NodeRequirementsDict(TypedDict): + CPU: float + RAM: float - images_to_build = [] - - for image_index in range(number_of_computational_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "computational", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - for image_index in range(number_of_interactive_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "dynamic", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - results = await asyncio.gather(*images_to_build) - list_of_pushed_images_tags.extend(results) - except DockerError: - _logger.exception("Unexpected docker API error") - raise +class ServiceExtrasDict(TypedDict): + node_requirements: NodeRequirementsDict + build_date: str + vcs_ref: str + vcs_url: str - return list_of_pushed_images_tags - yield build_push_images +class ServiceDescriptionDict(TypedDict): + key: str + version: str + type: Literal["computational", "dynamic"] - _logger.info("clean registry") - _clean_registry(registry_url, list_of_pushed_images_tags) - _clean_registry(registry_url, dependent_images) + +class ServiceInRegistryInfoDict(TypedDict): + service_description: ServiceDescriptionDict + docker_labels: dict[str, Any] + image_path: str + internal_port: int | None + entry_point: str + service_extras: ServiceExtrasDict + + +def _create_service_description( + service_type: Literal["computational", "dynamic"], name: str, tag: str +) -> ServiceDescriptionDict: + service_desc = json.loads( + (CURRENT_DIR / "dummy_service_description-v1.json").read_text() + ) + + if service_type == "computational": + 
service_key_type = "comp" + elif service_type == "dynamic": + service_key_type = "dynamic" + else: + msg = f"Invalid {service_type=}" + raise ValueError(msg) + + service_desc["key"] = f"simcore/services/{service_key_type}/{name}" + service_desc["version"] = tag + service_desc["type"] = service_type + + return service_desc + + +def _create_docker_labels( + service_description: ServiceDescriptionDict, *, bad_json_format: bool +) -> dict[str, str]: + docker_labels = {} + for key, value in service_description.items(): + docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) + if bad_json_format: + docker_labels[".".join(["io", "simcore", key])] = ( + "d32;'" + docker_labels[".".join(["io", "simcore", key])] + ) + + return docker_labels + + +async def _create_base_image(labels, tag) -> dict[str, Any]: + dockerfile = """ +FROM alpine +CMD while true; do sleep 10; done + """ + f = BytesIO(dockerfile.encode("utf-8")) + tar_obj = utils.mktar_from_dockerfile(f) + + # build docker base image + docker = Docker() + base_docker_image = await docker.images.build( + fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag + ) + await docker.close() + return base_docker_image -async def _build_push_image( - docker_dir, - registry_url, - service_type, - name, - tag, +async def _build_and_push_image( + registry_url: str, + service_type: Literal["computational", "dynamic"], + name: str, + tag: str, dependent_image=None, *, - bad_json_format=False, -): # pylint: disable=R0913 + bad_json_format: bool = False, +) -> ServiceInRegistryInfoDict: # pylint: disable=R0913 # crate image service_description = _create_service_description(service_type, name, tag) - docker_labels = _create_docker_labels(service_description, bad_json_format) + docker_labels = _create_docker_labels( + service_description, bad_json_format=bad_json_format + ) additional_docker_labels = [ - {"name": "constraints", "type": "string", "value": ["node.role==manager"]} + { + "name": 
"constraints", + "type": "string", + "value": ["node.role==manager"], + } ] internal_port = None entry_point = "" if service_type == "dynamic": - internal_port = random.randint(1, 65535) + internal_port = random.randint(1, 65535) # noqa: S311 additional_docker_labels.append( - {"name": "ports", "type": "int", "value": internal_port} + { + "name": "ports", + "type": "int", + "value": internal_port, + } ) entry_point = "/test/entry_point" docker_labels["simcore.service.bootsettings"] = json.dumps( - [{"name": "entry_point", "type": "string", "value": entry_point}] + [ + { + "name": "entry_point", + "type": "string", + "value": entry_point, + } + ] ) docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) if bad_json_format: @@ -142,15 +174,15 @@ async def _build_push_image( ) # create the typical org.label-schema labels - service_extras = { - "node_requirements": { - "CPU": DEFAULT_MAX_NANO_CPUS / 1e9, - "RAM": DEFAULT_MAX_MEMORY, - }, - "build_date": "2020-08-19T15:36:27Z", - "vcs_ref": "ca180ef1", - "vcs_url": "git@github.com:ITISFoundation/osparc-simcore.git", - } + service_extras = ServiceExtrasDict( + node_requirements=NodeRequirementsDict( + CPU=DEFAULT_MAX_NANO_CPUS / 1e9, + RAM=DEFAULT_MAX_MEMORY, + ), + build_date="2020-08-19T15:36:27Z", + vcs_ref="ca180ef1", + vcs_url="git@github.com:ITISFoundation/osparc-simcore.git", + ) docker_labels["org.label-schema.build-date"] = service_extras["build_date"] docker_labels["org.label-schema.schema-version"] = "1.0" docker_labels["org.label-schema.vcs-ref"] = service_extras["vcs_ref"] @@ -162,22 +194,26 @@ async def _build_push_image( await _create_base_image(docker_labels, image_tag) # push image to registry - docker = Docker() - await docker.images.push(image_tag) - await docker.close() + try: + docker = Docker() + await docker.images.push(image_tag) + finally: + await docker.close() + # remove image from host # docker.images.remove(image_tag) - return { - "service_description": 
service_description, - "docker_labels": docker_labels, - "image_path": image_tag, - "internal_port": internal_port, - "entry_point": entry_point, - "service_extras": service_extras, - } + + return ServiceInRegistryInfoDict( + service_description=service_description, + docker_labels=docker_labels, + image_path=image_tag, + internal_port=internal_port, + entry_point=entry_point, + service_extras=service_extras, + ) -def _clean_registry(registry_url, list_of_images): +def _clean_registry(registry_url: str, list_of_images: list[ServiceInRegistryInfoDict]): request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} for image in list_of_images: service_description = image["service_description"] @@ -197,51 +233,83 @@ def _clean_registry(registry_url, list_of_images): response = requests.delete(url, headers=request_headers, timeout=5) -async def _create_base_image(labels, tag): - dockerfile = """ -FROM alpine -CMD while true; do sleep 10; done - """ - f = BytesIO(dockerfile.encode("utf-8")) - tar_obj = utils.mktar_from_dockerfile(f) +class PushServicesCallable(Protocol): + async def __call__( + self, + *, + number_of_computational_services: int, + number_of_interactive_services: int, + inter_dependent_services: bool = False, + bad_json_format: bool = False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + ... 
- # build docker base image - docker = Docker() - base_docker_image = await docker.images.build( - fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag - ) - await docker.close() - return base_docker_image[0] +@pytest.fixture +def push_services(docker_registry: str) -> Iterator[PushServicesCallable]: + registry_url = docker_registry + list_of_pushed_images_tags: list[ServiceInRegistryInfoDict] = [] + dependent_images = [] -def _create_service_description(service_type, name, tag): - file_name = "dummy_service_description-v1.json" - dummy_description_path = Path(__file__).parent / file_name - with dummy_description_path.open() as file_pt: - service_desc = json.load(file_pt) + async def _build_push_images_to_docker_registry( + *, + number_of_computational_services, + number_of_interactive_services, + inter_dependent_services=False, + bad_json_format=False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + try: + dependent_image = None + if inter_dependent_services: + dependent_image = await _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="dependency", + tag="10.52.999999", + dependent_image=None, + bad_json_format=bad_json_format, + ) + dependent_images.append(dependent_image) - if service_type == "computational": - service_key_type = "comp" - elif service_type == "dynamic": - service_key_type = "dynamic" - else: - msg = f"Invalid {service_type=}" - raise ValueError(msg) + images_to_build: list[Awaitable] = [ + _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, + ) + for image_index in range(number_of_computational_services) + ] + + images_to_build.extend( + [ + _build_and_push_image( + registry_url=registry_url, + service_type="dynamic", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, 
+ ) + for image_index in range(number_of_interactive_services) + ] + ) - service_desc["key"] = f"simcore/services/{service_key_type}/{name}" - service_desc["version"] = tag - service_desc["type"] = service_type + results = await asyncio.gather(*images_to_build) + list_of_pushed_images_tags.extend(results) - return service_desc + except DockerError: + _logger.exception("Docker API error while building and pushing images") + raise + return list_of_pushed_images_tags -def _create_docker_labels(service_description, bad_json_format): - docker_labels = {} - for key, value in service_description.items(): - docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) - if bad_json_format: - docker_labels[".".join(["io", "simcore", key])] = ( - "d32;'" + docker_labels[".".join(["io", "simcore", key])] - ) + yield _build_push_images_to_docker_registry - return docker_labels + _logger.info("clean registry") + _clean_registry(registry_url, list_of_pushed_images_tags) + _clean_registry(registry_url, dependent_images) diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 3a326ef6ba2..3bac70c63f7 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -1,58 +1,73 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except # pylint: disable=redefined-outer-name -# pylint: disable=R0915 +# pylint: disable=unused-argument +# pylint: disable=unused-variable # pylint: disable=too-many-arguments import json +import time import uuid +from collections.abc import AsyncIterator from urllib.parse import quote +import httpx import pytest from aioresponses.core import CallbackResult, aioresponses +from fastapi import FastAPI, status +from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict from helpers import json_schema_validator -from servicelib.rest_responses import ( # pylint: disable=no-name-in-module - 
unwrap_envelope, -) -from simcore_service_director import main, resources, rest +from httpx._transports.asgi import ASGITransport +from simcore_service_director import resources, rest @pytest.fixture -def client( - loop, - aiohttp_client, - aiohttp_unused_port, - configure_schemas_location, - configure_registry_access, -): - app = main.setup_app() - server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + app=app, + base_url="http://director.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance(client._transport, ASGITransport) + yield client -async def test_root_get(client, api_version_prefix): - web_response = await client.get(f"/{api_version_prefix}/") - assert web_response.content_type == "application/json" - assert web_response.status == 200 - healthcheck_enveloped = await web_response.json() - assert "data" in healthcheck_enveloped +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" - assert isinstance(healthcheck_enveloped["data"], dict) + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") - healthcheck = healthcheck_enveloped["data"] - assert healthcheck["name"] == "simcore-service-director" - assert healthcheck["status"] == "SERVICE_RUNNING" - assert healthcheck["version"] == "0.1.0" - assert healthcheck["api_version"] == "0.1.0" +async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") -def _check_services(created_services, services, schema_version="v1"): - assert 
len(created_services) == len(services) + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + + data, error = _assert_response_and_unwrap_envelope(resp) + assert data + assert not error + + assert data["name"] == "simcore-service-director" + assert data["status"] == "SERVICE_RUNNING" + assert data["version"] == "0.1.0" + assert data["api_version"] == "0.1.0" + + +def _assert_services( + *, + expected: list[ServiceInRegistryInfoDict], + got: list[ServiceInRegistryInfoDict], + schema_version="v1", +): + assert len(expected) == len(got) created_service_descriptions = [ (x["service_description"]["key"], x["service_description"]["version"]) - for x in created_services + for x in expected ] json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) @@ -60,8 +75,8 @@ def _check_services(created_services, services, schema_version="v1"): with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) - for service in services: - service.pop("image_digest") + for service in got: + service.pop("image_digest", None) if schema_version == "v1": assert ( created_service_descriptions.count((service["key"], service["version"])) @@ -70,78 +85,121 @@ def _check_services(created_services, services, schema_version="v1"): json_schema_validator.validate_instance_object(service, service_schema) -async def test_services_get(docker_registry, client, push_services, api_version_prefix): +async def test_list_services_with_empty_registry( + docker_registry: str, + client: httpx.AsyncClient, + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ # empty case - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services([], services) - - # some services - created_services = await push_services( + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=[], got=services) + + +@pytest.fixture +async def created_services( + push_services: PushServicesCallable, +) -> list[ServiceInRegistryInfoDict]: + return await push_services( number_of_computational_services=3, number_of_interactive_services=2 ) - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services(created_services, services) - - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=blahblah" - ) - assert web_response.status == 400 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert "data" not in services_enveloped - assert "error" in services_enveloped - web_response = await client.get( + +async def test_list_services( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=created_services, got=services) + + +async def test_get_service_bad_request( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + assert len(created_services) > 0 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not services + assert error + + +async def test_list_services_by_service_type( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ assert len(created_services) == 5 + + resp = await client.get( f"/{api_version_prefix}/services?service_type=computational" ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services assert len(services) == 3 - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=interactive" - ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] + resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services assert len(services) == 2 -async def test_services_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( - f"/{api_version_prefix}/services/whatever/someversion" - ) - assert web_response.status == 400 - web_response = await client.get( +async def test_get_services_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + resp = await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" ) - assert web_response.status == 404 - web_response = await client.get( + assert resp.status_code == status.HTTP_404_NOT_FOUND + + resp = await client.get( 
f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" ) - assert web_response.status == 404 + assert resp.status_code == status.HTTP_404_NOT_FOUND + - created_services = await push_services(3, 2) +async def test_get_services_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): assert len(created_services) == 5 - retrieved_services = [] + retrieved_services: list[ServiceInRegistryInfoDict] = [] for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! @@ -149,25 +207,26 @@ async def test_services_by_key_version_get( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/services/{key}/{version}" - web_response = await client.get(url) + resp = await client.get(url) - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. 
- assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + assert resp.encoding == "application/json" + services_enveloped = resp.json() assert isinstance(services_enveloped["data"], list) services = services_enveloped["data"] assert len(services) == 1 retrieved_services.append(services[0]) - _check_services(created_services, retrieved_services) + + _assert_services(expected=created_services, got=retrieved_services) async def test_get_service_labels( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - created_services = await push_services(3, 2) + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 for service in created_services: service_description = service["service_description"] @@ -176,32 +235,37 @@ async def test_get_service_labels( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/services/{key}/{version}/labels" - web_response = await client.get(url) - assert web_response.status == 200, await web_response.text() + resp = await client.get(url) + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services_enveloped = await web_response.json() + services_enveloped = resp.json() labels = services_enveloped["data"] assert service["docker_labels"] == labels -async def test_services_extras_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( f"/{api_version_prefix}/service_extras/whatever/someversion" ) - assert web_response.status == 400 - web_response = await client.get( + assert resp.status_code == 
status.HTTP_400_BAD_REQUEST + resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) - assert web_response.status == 404 - web_response = await client.get( + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" ) - assert web_response.status == 404 + assert resp.status_code == status.HTTP_404_NOT_FOUND + - created_services = await push_services(3, 2) +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): assert len(created_services) == 5 for created_service in created_services: @@ -211,13 +275,11 @@ async def test_services_extras_by_key_version_get( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/service_extras/{key}/{version}" - web_response = await client.get(url) + resp = await client.get(url) - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. 
- assert web_response.content_type == "application/json" - service_extras_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + assert resp.encoding == "application/json" + service_extras_enveloped = resp.json() assert isinstance(service_extras_enveloped["data"], dict) service_extras = service_extras_enveloped["data"] @@ -225,7 +287,7 @@ async def test_services_extras_by_key_version_get( async def _start_get_stop_services( - client, + client: httpx.AsyncClient, push_services, user_id, project_id, @@ -235,10 +297,10 @@ async def _start_get_stop_services( mocker, ): params = {} - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - assert web_response.status == 400 + assert resp.status_code == status.HTTP_400_BAD_REQUEST params = { "user_id": "None", @@ -248,19 +310,19 @@ async def _start_get_stop_services( "service_tag": "None", # optional "service_basepath": "None", # optional } - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 400, data + data = resp.json() + assert resp.status_code == status.HTTP_400_BAD_REQUEST, data params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" params["service_tag"] = "1.2.3" - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 404, data + data = resp.json() + assert resp.status_code == status.HTTP_404_NOT_FOUND, data created_services = await push_services(0, 2) assert len(created_services) == 2 @@ -275,12 +337,12 @@ async def _start_get_stop_services( params["service_basepath"] = "/i/am/a/basepath" params["service_uuid"] = str(uuid.uuid4()) # start the service - web_response = await 
client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - assert web_response.status == 201 - assert web_response.content_type == "application/json" - running_service_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_201_CREATED + assert resp.encoding == "application/json" + running_service_enveloped = resp.json() assert isinstance(running_service_enveloped["data"], dict) assert all( k in running_service_enveloped["data"] @@ -313,14 +375,14 @@ async def _start_get_stop_services( assert service_basepath == params["service_basepath"] # get the service - web_response = await client.request( + resp = await client.request( "GET", f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", ) - assert web_response.status == 200 - text = await web_response.text() - assert web_response.content_type == "application/json", text - running_service_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK + text = resp.text + assert resp.encoding == "application/json", f"Got {text=}" + running_service_enveloped = resp.json() assert isinstance(running_service_enveloped["data"], dict) assert all( k in running_service_enveloped["data"] @@ -371,17 +433,17 @@ async def _start_get_stop_services( status=200, callback=mocked_save_state_cb, ) - web_response = await client.delete( + resp = await client.delete( f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", params=query_params, ) if expected_save_state_call: mocked_save_state_cb.assert_called_once() - text = await web_response.text() - assert web_response.status == 204, text - assert web_response.content_type == "application/json" - data = await web_response.json() + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.encoding == "application/json" + data = resp.json() assert data is None @@ -390,7 +452,7 @@ async def 
_start_get_stop_services( ) async def test_running_services_post_and_delete_no_swarm( configure_swarm_stack_name, - client, + client: httpx.AsyncClient, push_services, user_id, project_id, @@ -402,11 +464,11 @@ async def test_running_services_post_and_delete_no_swarm( "service_uuid": "sdlfkj4", "service_key": "simcore/services/comp/some-key", } - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 500, data + data = resp.json() + assert resp.status_code == 500, data @pytest.mark.parametrize( @@ -414,7 +476,7 @@ async def test_running_services_post_and_delete_no_swarm( ) async def test_running_services_post_and_delete( configure_swarm_stack_name, - client, + client: httpx.AsyncClient, push_services, docker_swarm, user_id, @@ -437,7 +499,7 @@ async def test_running_services_post_and_delete( async def test_running_interactive_services_list_get( - client, push_services, docker_swarm + client: httpx.AsyncClient, push_services, docker_swarm ): """Test case for running_interactive_services_list_get @@ -461,10 +523,10 @@ async def test_running_interactive_services_list_get( params["service_tag"] = service_description["version"] params["service_uuid"] = str(uuid.uuid4()) # start the service - web_response = await client.post( + resp = await client.post( "/v0/running_interactive_services", params=params ) - assert web_response.status == 201 + assert resp.status_code == 201 # get the list of services for user_id in user_ids: for project_id in project_ids: @@ -472,12 +534,12 @@ async def test_running_interactive_services_list_get( # list by user_id params["user_id"] = user_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = 
unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -485,12 +547,12 @@ async def test_running_interactive_services_list_get( # list by user_id and project_id params["project_id"] = project_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -499,12 +561,12 @@ async def test_running_interactive_services_list_get( params = {} params["project_id"] = project_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -515,7 +577,6 @@ async def test_running_interactive_services_list_get( async def test_performance_get_services( loop, configure_custom_registry, configure_schemas_location ): - import time fake_request = "fake request" start_time = time.perf_counter() @@ -524,15 +585,16 @@ async def test_performance_get_services( for i in range(number_of_calls): print("calling iteration", i) start_time_i = time.perf_counter() - web_response = await 
rest.handlers.services_get(fake_request) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = json.loads(web_response.text) + resp = await rest.handlers.services_get(fake_request) + assert resp.status_code == status.HTTP_200_OK + assert resp.encoding == "application/json" + services_enveloped = json.loads(resp.text) assert isinstance(services_enveloped["data"], list) services = services_enveloped["data"] number_of_services = len(services) print("iteration completed in", (time.perf_counter() - start_time_i), "s") stop_time = time.perf_counter() + print( f"Time to run {number_of_calls} times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" ) From ff3f2bd905ad46c9720e01926494f6e122e55c20 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:13:04 +0100 Subject: [PATCH 029/201] rm test_handlers -> api --- services/director/tests/test_handlers.py | 600 ----------------------- 1 file changed, 600 deletions(-) delete mode 100644 services/director/tests/test_handlers.py diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py deleted file mode 100644 index 3bac70c63f7..00000000000 --- a/services/director/tests/test_handlers.py +++ /dev/null @@ -1,600 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - -import json -import time -import uuid -from collections.abc import AsyncIterator -from urllib.parse import quote - -import httpx -import pytest -from aioresponses.core import CallbackResult, aioresponses -from fastapi import FastAPI, status -from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict -from helpers import json_schema_validator -from httpx._transports.asgi import 
ASGITransport -from simcore_service_director import resources, rest - - -@pytest.fixture -async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: - # - Needed for app to trigger start/stop event handlers - # - Prefer this client instead of fastapi.testclient.TestClient - async with httpx.AsyncClient( - app=app, - base_url="http://director.testserver.io", - headers={"Content-Type": "application/json"}, - ) as client: - assert isinstance(client._transport, ASGITransport) - yield client - - -def _assert_response_and_unwrap_envelope(got: httpx.Response): - assert got.encoding == "application/json" - - body = got.json() - assert isinstance(body, dict) - assert "data" in body or "error" in body - return body.get("data"), body.get("error") - - -async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): - resp = await client.get(f"/{api_version_prefix}/") - - assert resp.is_success - assert resp.status_code == status.HTTP_200_OK - - data, error = _assert_response_and_unwrap_envelope(resp) - assert data - assert not error - - assert data["name"] == "simcore-service-director" - assert data["status"] == "SERVICE_RUNNING" - assert data["version"] == "0.1.0" - assert data["api_version"] == "0.1.0" - - -def _assert_services( - *, - expected: list[ServiceInRegistryInfoDict], - got: list[ServiceInRegistryInfoDict], - schema_version="v1", -): - assert len(expected) == len(got) - - created_service_descriptions = [ - (x["service_description"]["key"], x["service_description"]["version"]) - for x in expected - ] - - json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() is True - with json_schema_path.open() as file_pt: - service_schema = json.load(file_pt) - - for service in got: - service.pop("image_digest", None) - if schema_version == "v1": - assert ( - created_service_descriptions.count((service["key"], service["version"])) - == 1 - ) - json_schema_validator.validate_instance_object(service, 
service_schema) - - -async def test_list_services_with_empty_registry( - docker_registry: str, - client: httpx.AsyncClient, - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - - # empty case - resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert isinstance(services, list) - - _assert_services(expected=[], got=services) - - -@pytest.fixture -async def created_services( - push_services: PushServicesCallable, -) -> list[ServiceInRegistryInfoDict]: - return await push_services( - number_of_computational_services=3, number_of_interactive_services=2 - ) - - -async def test_list_services( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - - resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert isinstance(services, list) - - _assert_services(expected=created_services, got=services) - - -async def test_get_service_bad_request( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" 
- assert len(created_services) > 0 - - resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not services - assert error - - -async def test_list_services_by_service_type( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - assert len(created_services) == 5 - - resp = await client.get( - f"/{api_version_prefix}/services?service_type=computational" - ) - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert services - assert len(services) == 3 - - resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert services - assert len(services) == 2 - - -async def test_get_services_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - resp = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - resp = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - retrieved_services: list[ServiceInRegistryInfoDict] = [] - for 
created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! - key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/services/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - assert resp.encoding == "application/json" - services_enveloped = resp.json() - - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - assert len(services) == 1 - retrieved_services.append(services[0]) - - _assert_services(expected=created_services, got=retrieved_services) - - -async def test_get_service_labels( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for service in created_services: - service_description = service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/services/{key}/{version}/labels" - resp = await client.get(url) - assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - - services_enveloped = resp.json() - labels = services_enveloped["data"] - - assert service["docker_labels"] == labels - - -async def test_get_services_extras_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get( - f"/{api_version_prefix}/service_extras/whatever/someversion" - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_extras_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/service_extras/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" - assert resp.encoding == "application/json" - service_extras_enveloped = resp.json() - - assert isinstance(service_extras_enveloped["data"], dict) - service_extras = service_extras_enveloped["data"] - assert created_service["service_extras"] == service_extras - - -async def _start_get_stop_services( - client: httpx.AsyncClient, - push_services, - user_id, - project_id, - api_version_prefix: str, - save_state: bool | None, - expected_save_state_call: bool, - mocker, -): - params = {} - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "None", - "service_tag": "None", # optional - "service_basepath": "None", # optional - } - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == status.HTTP_400_BAD_REQUEST, data - - params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" - params["service_tag"] = "1.2.3" - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == status.HTTP_404_NOT_FOUND, data - - created_services = await push_services(0, 2) - assert len(created_services) == 2 - for created_service in created_services: - service_description = created_service["service_description"] - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - service_port = created_service["internal_port"] 
- service_entry_point = created_service["entry_point"] - params["service_basepath"] = "/i/am/a/basepath" - params["service_uuid"] = str(uuid.uuid4()) - # start the service - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert resp.status_code == status.HTTP_201_CREATED - assert resp.encoding == "application/json" - running_service_enveloped = resp.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - "service_version", - "published_port", - "entry_point", - "service_host", - "service_port", - "service_basepath", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert running_service_enveloped["data"]["service_port"] == service_port - service_published_port = running_service_enveloped["data"]["published_port"] - assert not service_published_port - assert service_entry_point == running_service_enveloped["data"]["entry_point"] - service_host = running_service_enveloped["data"]["service_host"] - assert service_host == f"test_{params['service_uuid']}" - service_basepath = running_service_enveloped["data"]["service_basepath"] - assert service_basepath == params["service_basepath"] - - # get the service - resp = await client.request( - "GET", - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - ) - assert resp.status_code == status.HTTP_200_OK - text = resp.text - assert resp.encoding == "application/json", f"Got {text=}" - running_service_enveloped = resp.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - 
"service_version", - "published_port", - "entry_point", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert ( - running_service_enveloped["data"]["published_port"] - == service_published_port - ) - assert running_service_enveloped["data"]["entry_point"] == service_entry_point - assert running_service_enveloped["data"]["service_host"] == service_host - assert running_service_enveloped["data"]["service_port"] == service_port - assert running_service_enveloped["data"]["service_basepath"] == service_basepath - - # stop the service - query_params = {} - if save_state: - query_params.update({"save_state": "true" if save_state else "false"}) - - mocked_save_state_cb = mocker.MagicMock( - return_value=CallbackResult(status=200, payload={}) - ) - PASSTHROUGH_REQUESTS_PREFIXES = [ - "http://127.0.0.1", - "http://localhost", - "unix://", # docker engine - "ws://", # websockets - ] - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - - # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- - mock.post( - f"http://{service_host}:{service_port}{service_basepath}/state", - status=200, - callback=mocked_save_state_cb, - ) - resp = await client.delete( - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - params=query_params, - ) - if expected_save_state_call: - mocked_save_state_cb.assert_called_once() - - text = resp.text - assert resp.status_code == status.HTTP_204_NO_CONTENT, text - assert resp.encoding == "application/json" - data = resp.json() - assert data is None - - -@pytest.mark.skip( - reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" -) -async def 
test_running_services_post_and_delete_no_swarm( - configure_swarm_stack_name, - client: httpx.AsyncClient, - push_services, - user_id, - project_id, - api_version_prefix, -): - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "simcore/services/comp/some-key", - } - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == 500, data - - -@pytest.mark.parametrize( - "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] -) -async def test_running_services_post_and_delete( - configure_swarm_stack_name, - client: httpx.AsyncClient, - push_services, - docker_swarm, - user_id, - project_id, - api_version_prefix, - save_state: bool | None, - expected_save_state_call: bool, - mocker, -): - await _start_get_stop_services( - client, - push_services, - user_id, - project_id, - api_version_prefix, - save_state, - expected_save_state_call, - mocker, - ) - - -async def test_running_interactive_services_list_get( - client: httpx.AsyncClient, push_services, docker_swarm -): - """Test case for running_interactive_services_list_get - - Returns a list of interactive services - """ - user_ids = ["first_user_id", "second_user_id"] - project_ids = ["first_project_id", "second_project_id", "third_project_id"] - # prepare services - NUM_SERVICES = 1 - created_services = await push_services(0, NUM_SERVICES) - assert len(created_services) == NUM_SERVICES - # start the services - for user_id in user_ids: - for project_id in project_ids: - for created_service in created_services: - service_description = created_service["service_description"] - params = {} - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - params["service_uuid"] = str(uuid.uuid4()) - # start the service - resp = await 
client.post( - "/v0/running_interactive_services", params=params - ) - assert resp.status_code == 201 - # get the list of services - for user_id in user_ids: - for project_id in project_ids: - params = {} - # list by user_id - params["user_id"] = user_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(project_ids) * NUM_SERVICES - # list by user_id and project_id - params["project_id"] = project_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == NUM_SERVICES - # list by project_id - params = {} - params["project_id"] = project_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(user_ids) * NUM_SERVICES - - -@pytest.mark.skip(reason="test needs credentials to real registry") -async def test_performance_get_services( - loop, configure_custom_registry, configure_schemas_location -): - - fake_request = "fake request" - start_time = time.perf_counter() - number_of_calls = 1 - number_of_services = 0 - for i in range(number_of_calls): - print("calling iteration", i) - start_time_i = time.perf_counter() - resp = await rest.handlers.services_get(fake_request) - assert resp.status_code == 
status.HTTP_200_OK - assert resp.encoding == "application/json" - services_enveloped = json.loads(resp.text) - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - number_of_services = len(services) - print("iteration completed in", (time.perf_counter() - start_time_i), "s") - stop_time = time.perf_counter() - - print( - f"Time to run {number_of_calls} times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" - ) From f4c582f63a6ada996ac971dcc16f6db97943df65 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:14:30 +0100 Subject: [PATCH 030/201] migrated list services --- .../api/rest/_services.py | 48 +++++++++++++++++-- 1 file changed, 44 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 2bf1b066bf3..57601c83f0b 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -1,15 +1,55 @@ +import logging +from typing import Annotated, Any + import arrow -from fastapi import APIRouter +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope from models_library.services_enums import ServiceType from models_library.services_types import ServiceKey, ServiceVersion +from servicelib.fastapi.dependencies import get_app + +from ... import exceptions, registry_proxy router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/services") -async def list_services(service_type: ServiceType | None = None): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def list_services( + the_app: Annotated[FastAPI, Depends(get_app)], + service_type: ServiceType | None = None, +): + log.debug( + "Client does list_services request with service_type %s", + service_type, + ) + try: + services: list[dict[str, Any]] = [] + if not service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.ALL + ) + elif "computational" in service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.COMPUTATIONAL + ) + elif "interactive" in service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.DYNAMIC + ) + # NOTE: the validation is done in the catalog. This entrypoint IS and MUST BE only used by the catalog!! + # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] + # services = node_validator.validate_nodes(services) + return Envelope[list[dict[str, Any]]](data=services) + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/services/{service_key}/{service_version}") From f57d96927d4f782006b80394ed191459287e0eb4 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:18:49 +0100 Subject: [PATCH 031/201] migrated services endpoint --- .../api/rest/_services.py | 62 ++++++++++++++++--- 1 file changed, 54 insertions(+), 8 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 57601c83f0b..7e3bdb91d51 100644 --- 
a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -1,7 +1,6 @@ import logging from typing import Annotated, Any -import arrow from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.generics import Envelope from models_library.services_enums import ServiceType @@ -19,7 +18,7 @@ async def list_services( the_app: Annotated[FastAPI, Depends(get_app)], service_type: ServiceType | None = None, -): +) -> Envelope[list[dict[str, Any]]]: log.debug( "Client does list_services request with service_type %s", service_type, @@ -54,17 +53,64 @@ async def list_services( @router.get("/services/{service_key}/{service_version}") async def get_service( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +) -> Envelope[list[dict[str, Any]]]: + log.debug( + "Client does get_service with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + services = [ + await registry_proxy.get_image_details( + the_app, service_key, service_version + ) + ] + return Envelope[list[dict[str, Any]]](data=services) + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/services/{service_key}/{service_version}/labels") async def list_service_labels( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -): - # NOTE: sync url in 
docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +) -> Envelope[dict[str, Any]]: + log.debug( + "Retrieving service labels with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_labels, _ = await registry_proxy.get_image_labels( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_labels) + + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From 0f8556fdfcd385ca6df6c53464a287e2c3b1a88e Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:25:17 +0100 Subject: [PATCH 032/201] cli and models --- .../models/__init__.py | 0 services/director/tests/conftest.py | 3 +- .../director/tests/test__model_examples.py | 28 +++++++++++++++ services/director/tests/test_cli.py | 34 +++++++++++++++++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 services/director/src/simcore_service_director/models/__init__.py create mode 100644 services/director/tests/test__model_examples.py create mode 100644 services/director/tests/test_cli.py diff --git a/services/director/src/simcore_service_director/models/__init__.py b/services/director/src/simcore_service_director/models/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 925c6c26d03..d09e4623d40 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -4,8 +4,8 @@ # pylint: disable=too-many-arguments 
import os +from collections.abc import AsyncIterator from pathlib import Path -from typing import AsyncIterator import pytest import simcore_service_director @@ -19,6 +19,7 @@ pytest_plugins = [ "fixtures.fake_services", + "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", diff --git a/services/director/tests/test__model_examples.py b/services/director/tests/test__model_examples.py new file mode 100644 index 00000000000..d9604d738d6 --- /dev/null +++ b/services/director/tests/test__model_examples.py @@ -0,0 +1,28 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import json +from typing import Any + +import pytest +import simcore_service_director.models +from pydantic import BaseModel, ValidationError +from pytest_simcore.pydantic_models import walk_model_examples_in_package + + +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + walk_model_examples_in_package(simcore_service_director.models), +) +def test_director_service_model_examples( + model_cls: type[BaseModel], example_name: int, example_data: Any +): + try: + assert model_cls.parse_obj(example_data) is not None + except ValidationError as err: + pytest.fail( + f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" + ) diff --git a/services/director/tests/test_cli.py b/services/director/tests/test_cli.py new file mode 100644 index 00000000000..3b42989bcff --- /dev/null +++ b/services/director/tests/test_cli.py @@ -0,0 +1,34 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +import os + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director._meta import API_VERSION +from simcore_service_director.cli import main +from 
simcore_service_director.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION + + +def test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK + + settings = ApplicationSettings.parse_raw(result.output) + assert settings.dict() == ApplicationSettings.create_from_envs().dict() + + +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout From e07ed6b069372d98f288911e37688ad65fa2806b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:29:11 +0100 Subject: [PATCH 033/201] test healthcheck --- services/director/tests/api/conftest.py | 2 +- services/director/tests/api/test_rest_health.py | 15 +++++++++++++++ .../director/tests/api/test_rest_services.py | 16 ---------------- 3 files changed, 16 insertions(+), 17 deletions(-) create mode 100644 services/director/tests/api/test_rest_health.py diff --git a/services/director/tests/api/conftest.py b/services/director/tests/api/conftest.py index 8cc186e8465..c1d010bb3a2 100644 --- a/services/director/tests/api/conftest.py +++ b/services/director/tests/api/conftest.py @@ -21,7 +21,7 @@ async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: base_url="http://director.testserver.io", headers={"Content-Type": "application/json"}, ) as client: - assert isinstance(client._transport, ASGITransport) + assert isinstance(getattr(client, "_transport", None), ASGITransport) yield client diff --git 
a/services/director/tests/api/test_rest_health.py b/services/director/tests/api/test_rest_health.py new file mode 100644 index 00000000000..b1e6db622a4 --- /dev/null +++ b/services/director/tests/api/test_rest_health.py @@ -0,0 +1,15 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import httpx +from fastapi import status + + +async def test_healthcheck(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") + + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + assert "simcore_service_director" in resp.text diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 6d5941a6044..966d64c7974 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -22,22 +22,6 @@ def _assert_response_and_unwrap_envelope(got: httpx.Response): return body.get("data"), body.get("error") -async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): - resp = await client.get(f"/{api_version_prefix}/") - - assert resp.is_success - assert resp.status_code == status.HTTP_200_OK - - data, error = _assert_response_and_unwrap_envelope(resp) - assert data - assert not error - - assert data["name"] == "simcore-service-director" - assert data["status"] == "SERVICE_RUNNING" - assert data["version"] == "0.1.0" - assert data["api_version"] == "0.1.0" - - def _assert_services( *, expected: list[ServiceInRegistryInfoDict], From 7c80aa218bcf01643b375b5ac99fadedfc4c367b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:35:23 +0100 Subject: [PATCH 034/201] split service extras --- .../director/tests/api/test_rest_services.py | 41 +------------ .../tests/api/test_rest_services_extras.py | 59 +++++++++++++++++++ 2 
files changed, 60 insertions(+), 40 deletions(-) create mode 100644 services/director/tests/api/test_rest_services_extras.py diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 966d64c7974..3c511e0ef61 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -35,6 +35,7 @@ def _assert_services( for s in expected ] + # TODO: check these are correct! json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) assert json_schema_path.exists() is True with json_schema_path.open() as file_pt: @@ -198,43 +199,3 @@ async def test_get_service_labels( assert not error assert service["docker_labels"] == labels - - -async def test_get_services_extras_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get( - f"/{api_version_prefix}/service_extras/whatever/someversion" - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_extras_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/service_extras/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" - - service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert created_service["service_extras"] == service_extras diff --git a/services/director/tests/api/test_rest_services_extras.py b/services/director/tests/api/test_rest_services_extras.py new file mode 100644 index 00000000000..e87a7e4cf4c --- /dev/null +++ b/services/director/tests/api/test_rest_services_extras.py @@ -0,0 +1,59 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( + f"/{api_version_prefix}/service_extras/whatever/someversion" + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + created_services: 
list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/service_extras/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + + service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert created_service["service_extras"] == service_extras From 11aa54ed168a5b7853205140f3b67d40d94d5066 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:35:35 +0100 Subject: [PATCH 035/201] split service extras --- .../{test_rest_services_extras.py => test_rest_service_extras.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/director/tests/api/{test_rest_services_extras.py => test_rest_service_extras.py} (100%) diff --git a/services/director/tests/api/test_rest_services_extras.py b/services/director/tests/api/test_rest_service_extras.py similarity index 100% rename from services/director/tests/api/test_rest_services_extras.py rename to services/director/tests/api/test_rest_service_extras.py From 7ec3d19c87df2c92eac2308dbdad060547d63a31 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:54:13 +0100 Subject: [PATCH 036/201] migrated service extras endpoint --- .../api/rest/_service_extras.py | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index 1c30f411e22..f301a74a429 100644 --- 
a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -1,14 +1,43 @@ -import arrow -from fastapi import APIRouter +import logging +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope from models_library.services_types import ServiceKey, ServiceVersion +from servicelib.fastapi.dependencies import get_app + +from ... import exceptions, registry_proxy router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/service_extras/{service_key}/{service_version}") async def list_service_extras( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, ): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + log.debug( + "Client does service_extras_by_key_version_get request with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_extras = await registry_proxy.get_service_extras( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_extras) + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From 0f81b257295ca04ea46a319723aaf1bd6086393b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:01:21 +0100 Subject: [PATCH 037/201] adds api model --- .../api_schemas_director/__init__.py | 0 .../api_schemas_director/services.py | 5 
+++ .../director/tests/api/test_rest_services.py | 40 +++++++------------ 3 files changed, 20 insertions(+), 25 deletions(-) create mode 100644 packages/models-library/src/models_library/api_schemas_director/__init__.py create mode 100644 packages/models-library/src/models_library/api_schemas_director/services.py diff --git a/packages/models-library/src/models_library/api_schemas_director/__init__.py b/packages/models-library/src/models_library/api_schemas_director/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/models-library/src/models_library/api_schemas_director/services.py b/packages/models-library/src/models_library/api_schemas_director/services.py new file mode 100644 index 00000000000..52578fd7a69 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_director/services.py @@ -0,0 +1,5 @@ +from ..services_metadata_published import ServiceMetaDataPublished + + +class ServiceDataGet(ServiceMetaDataPublished): + ... diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 3c511e0ef61..b2db82e7902 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -3,14 +3,12 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -import json from urllib.parse import quote import httpx from fastapi import status from fixtures.fake_services import ServiceInRegistryInfoDict -from helpers import json_schema_validator -from simcore_service_director import resources +from models_library.api_schemas_director.services import ServiceDataGet def _assert_response_and_unwrap_envelope(got: httpx.Response): @@ -35,20 +33,12 @@ def _assert_services( for s in expected ] - # TODO: check these are correct! 
- json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() is True - with json_schema_path.open() as file_pt: - service_schema = json.load(file_pt) - - for service in got: - service.pop("image_digest", None) - if schema_version == "v1": - assert ( - expected_key_version_tuples.count((service["key"], service["version"])) - == 1 - ) - json_schema_validator.validate_instance_object(service, service_schema) + for data in got: + service = ServiceDataGet.parse_obj(data) + assert ( + expected_key_version_tuples.count((f"{service.key}", f"{service.version}")) + == 1 + ) async def test_list_services_with_empty_registry( @@ -60,7 +50,7 @@ async def test_list_services_with_empty_registry( # empty case resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -78,7 +68,7 @@ async def test_list_services( assert docker_registry, "docker-registry is not ready?" 
resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -97,7 +87,7 @@ async def test_get_service_bad_request( assert len(created_services) > 0 resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not services @@ -116,7 +106,7 @@ async def test_list_services_by_service_type( resp = await client.get( f"/{api_version_prefix}/services?service_type=computational" ) - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -124,7 +114,7 @@ async def test_list_services_by_service_type( assert len(services) == 3 resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -136,17 +126,17 @@ async def test_get_services_by_key_and_version_with_empty_registry( client: httpx.AsyncClient, api_version_prefix: str ): resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" resp = 
await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" async def test_get_services_by_key_and_version( From b1ab0eca34a80fc536ee39d56392ec79002d8aa3 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:04:02 +0100 Subject: [PATCH 038/201] updates service-extras --- services/director/tests/api/test_rest_service_extras.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/tests/api/test_rest_service_extras.py b/services/director/tests/api/test_rest_service_extras.py index e87a7e4cf4c..16cd76e254f 100644 --- a/services/director/tests/api/test_rest_service_extras.py +++ b/services/director/tests/api/test_rest_service_extras.py @@ -25,15 +25,15 @@ async def test_get_services_extras_by_key_and_version_with_empty_registry( resp = await client.get( f"/{api_version_prefix}/service_extras/whatever/someversion" ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" async def test_get_services_extras_by_key_and_version( From e66bdd8f356b5176e17b1701f13fca4b8bc098e6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:06:11 +0100 Subject: [PATCH 039/201] import fix --- 
.../director/src/simcore_service_director/api/rest/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py index 82ccec19302..d00722a5b6e 100644 --- a/services/director/src/simcore_service_director/api/rest/routes.py +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -4,7 +4,7 @@ http_exception_as_json_response, ) -from .._meta import API_VTAG +from ..._meta import API_VTAG from . import _health, _running_interactive_services, _service_extras, _services From 62c83bd3b3f49df0e9659f4eaad92e63fea8ca39 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:16:50 +0100 Subject: [PATCH 040/201] migrated running services --- .../api/rest/_running_interactive_services.py | 140 ++++++++++++++++-- 1 file changed, 124 insertions(+), 16 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index 1bd53330d30..29a58b80fba 100644 --- a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -1,41 +1,149 @@ +import logging from pathlib import Path +from typing import Annotated, Any from uuid import UUID -import arrow -from fastapi import APIRouter +from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException, status +from models_library.generics import Envelope from models_library.projects import ProjectID from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID +from servicelib.fastapi.dependencies import get_app +from simcore_service_director import exceptions + +from ... 
import producer router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/running_interactive_services") -async def list_running_services(user_id: UserID, project_id: ProjectID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def list_running_services( + the_app: Annotated[FastAPI, Depends(get_app)], + user_id: UserID | None, + project_id: ProjectID | None, +): + log.debug( + "Client does list_running_services request user_id %s, project_id %s", + user_id, + project_id, + ) + try: + services = await producer.get_services_details( + the_app, + f"{user_id}" if user_id else None, + f"{project_id}" if project_id else None, + ) + return Envelope[list[dict[str, Any]]](data=services) + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err -@router.post("/running_interactive_services") +@router.post( + "/running_interactive_services", + status_code=status.HTTP_201_CREATED, +) async def start_service( + the_app: Annotated[FastAPI, Depends(get_app)], user_id: UserID, project_id: ProjectID, service_key: ServiceKey, service_uuid: UUID, service_basepath: Path, service_tag: ServiceVersion | None = None, -): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + x_simcore_user_agent: str = Header(...), +) -> Envelope[dict[str, Any]]: + log.debug( + "Client does start_service with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", + user_id, + project_id, + service_key, + service_tag, + service_uuid, + service_basepath, + x_simcore_user_agent, + ) + try: + service = await producer.start_service( + the_app, + f"{user_id}", + f"{project_id}", + service_key, + service_tag, + f"{service_uuid}", + f"{service_basepath}", + x_simcore_user_agent, + ) + return Envelope[dict[str, Any]](data=service) + except exceptions.ServiceStartTimeoutError as err: + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.ServiceUUIDInUseError as err: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/running_interactive_services/{service_uuid}") -async def get_running_service(service_uuid: UUID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def get_running_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, +) -> Envelope[dict[str, Any]]: + log.debug( + "Client does get_running_service with service_uuid %s", + service_uuid, + ) + try: + service = await producer.get_service_details(the_app, f"{service_uuid}") + return Envelope[dict[str, Any]](data=service) + except exceptions.ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err + +@router.delete( + "/running_interactive_services/{service_uuid}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def stop_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, + save_state: bool = True, +): + log.debug( + "Client does stop_service with service_uuid %s", + service_uuid, + ) + try: + await producer.stop_service(the_app, f"{service_uuid}", save_state) -@router.delete("/running_interactive_services/{service_uuid}") -async def stop_service(service_uuid: UUID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + except exceptions.ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From b18830a70712b69d024955e5c9e0d5e578f80e18 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:17:10 +0100 Subject: [PATCH 041/201] migrated producer --- services/director/src/simcore_service_director/producer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 2414b3d13ee..5fe7477affa 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -749,7 +749,7 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str | None ) -> str: if service_key not in list_of_images: raise exceptions.ServiceNotAvailableError( @@ -772,6 +772,7 @@ async def _find_service_tag( ) log.debug("Service tag found is %s ", service_tag) + assert tag is not None # nosec return tag @@ -949,7 +950,7 @@ async def start_service( user_id: str, project_id: str, service_key: str, - service_tag: str, + service_tag: str | None, node_uuid: str, node_base_path: str, request_simcore_user_agent: str, From 142953a578b244a3c0aa8d8248cf93b40db1957f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:55:30 +0100 Subject: [PATCH 042/201] setup registry --- .../core/application.py | 7 ++++++ .../registry_proxy.py | 25 +++++++++++++------ 2 files changed, 25 insertions(+), 7 deletions(-) diff --git 
a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index f02d0f3d10e..7c5ffbcfb0a 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -2,6 +2,7 @@ from typing import Final from fastapi import FastAPI +from servicelib.client_session import persistent_client_session from servicelib.fastapi.tracing import setup_tracing from .. import registry_cache_task @@ -13,6 +14,8 @@ APP_STARTED_BANNER_MSG, ) from ..api.rest.routes import setup_api_routes +from ..monitoring import setup_app_monitoring +from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings _LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR @@ -50,7 +53,11 @@ def create_app(settings: ApplicationSettings) -> FastAPI: if app.state.settings.DIRECTOR_TRACING: setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + # replace by httpx client + app.cleanup_ctx.append(persistent_client_session) + setup_registry(app) registry_cache_task.setup(app) + setup_app_monitoring(app, "simcore_service_director") # ERROR HANDLERS diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index d3e855a2ed7..78b9c0b80d9 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -6,7 +6,7 @@ import re from http import HTTPStatus from pprint import pformat -from typing import Any, AsyncIterator +from typing import Any from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout @@ -43,7 +43,8 @@ async def _basic_auth_registry_request( app: FastAPI, path: str, method: str, **session_kwargs ) -> tuple[dict, dict]: if not config.REGISTRY_URL: - raise exceptions.DirectorException("URL 
to registry is not defined") + msg = "URL to registry is not defined" + raise exceptions.DirectorException(msg) url = URL( f"{'https' if config.REGISTRY_SSL else 'http'}://{config.REGISTRY_URL}{path}" @@ -177,7 +178,7 @@ async def registry_request( ) -async def is_registry_responsive(app: FastAPI) -> bool: +async def _is_registry_responsive(app: FastAPI) -> bool: path = "/v2/" try: await registry_request( @@ -189,21 +190,31 @@ async def is_registry_responsive(app: FastAPI) -> bool: return False -async def setup_registry(app: FastAPI) -> AsyncIterator[None]: +async def _setup_registry(app: FastAPI) -> None: logger.debug("pinging registry...") @retry( wait=wait_fixed(2), before_sleep=before_sleep_log(logger, logging.WARNING), - retry=retry_if_result(lambda result: result == False), + retry=retry_if_result(lambda result: result is False), reraise=True, ) async def wait_until_registry_responsive(app: FastAPI) -> bool: - return await is_registry_responsive(app) + return await _is_registry_responsive(app) await wait_until_registry_responsive(app) logger.info("Connected to docker registry") - yield + + +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + await _setup_registry(app) + + async def on_shutdown() -> None: + ... 
+ + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) async def _list_repositories(app: FastAPI) -> list[str]: From ff0f1325276f6c5964bda77cbadb925e4f09f5af Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:56:08 +0100 Subject: [PATCH 043/201] monitoring progress --- .../src/simcore_service_director/monitoring.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/services/director/src/simcore_service_director/monitoring.py b/services/director/src/simcore_service_director/monitoring.py index 403e4ef10ad..91fd841ec64 100644 --- a/services/director/src/simcore_service_director/monitoring.py +++ b/services/director/src/simcore_service_director/monitoring.py @@ -1,14 +1,11 @@ import prometheus_client -from aiohttp import web +from fastapi import FastAPI from prometheus_client import CONTENT_TYPE_LATEST from prometheus_client.registry import CollectorRegistry - - -from servicelib.monitor_services import ( # pylint: disable=no-name-in-module +from servicelib.monitor_services import ( add_instrumentation as add_services_instrumentation, ) - -from . 
import config +from simcore_service_director.core.settings import ApplicationSettings kCOLLECTOR_REGISTRY = f"{__name__}.collector_registry" @@ -21,8 +18,9 @@ async def metrics_handler(request: web.Request): return resp -def setup_app_monitoring(app: web.Application, app_name: str) -> None: - if not config.MONITORING_ENABLED: +def setup_app_monitoring(app: FastAPI, app_name: str) -> None: + app_settings: ApplicationSettings = app.state.settings + if not app_settings.DIRECTOR_MONITORING_ENABLED: return # app-scope registry app[kCOLLECTOR_REGISTRY] = reg = CollectorRegistry(auto_describe=True) From 95446b9874249731d539f71ef341fdb4a186327a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:57:20 +0100 Subject: [PATCH 044/201] removed generated code --- .../simcore_service_director/rest/__init__.py | 9 - .../rest/generated_code/__init__.py | 0 .../rest/generated_code/models/__init__.py | 25 - .../rest/generated_code/models/base_model_.py | 66 --- .../generated_code/models/error_enveloped.py | 88 ---- .../models/health_check_enveloped.py | 88 ---- .../models/inline_response200.py | 88 ---- .../models/inline_response2001.py | 88 ---- .../models/inline_response2001_authors.py | 120 ----- .../models/inline_response2001_badges.py | 122 ----- .../models/inline_response2001_data.py | 332 ------------- .../models/inline_response2002.py | 88 ---- .../models/inline_response2002_authors.py | 120 ----- .../models/inline_response2002_badges.py | 122 ----- .../models/inline_response2002_data.py | 115 ----- ...inline_response2002_data_container_spec.py | 60 --- ...ine_response2002_data_node_requirements.py | 147 ------ ...response2002_data_service_build_details.py | 110 ----- .../models/inline_response2003.py | 88 ---- .../models/inline_response2003_data.py | 399 ---------------- .../models/inline_response200_data.py | 135 ------ .../models/inline_response201.py | 88 ---- .../models/inline_response_default.py | 88 ---- 
.../models/inline_response_default_error.py | 118 ----- .../models/running_service_enveloped.py | 88 ---- .../models/running_services_enveloped.py | 88 ---- .../models/service_extras_enveloped.py | 88 ---- .../models/services_enveloped.py | 88 ---- .../generated_code/models/simcore_node.py | 447 ------------------ .../rest/generated_code/routing.py | 94 ---- .../rest/generated_code/typing_utils.py | 32 -- .../rest/generated_code/util.py | 131 ----- .../simcore_service_director/rest/handlers.py | 238 ---------- 33 files changed, 3998 deletions(-) delete mode 100644 services/director/src/simcore_service_director/rest/__init__.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/__init__.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/__init__.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py delete mode 100644 
services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py delete mode 100644 
services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/routing.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/typing_utils.py delete mode 100644 services/director/src/simcore_service_director/rest/generated_code/util.py delete mode 100644 services/director/src/simcore_service_director/rest/handlers.py diff --git a/services/director/src/simcore_service_director/rest/__init__.py b/services/director/src/simcore_service_director/rest/__init__.py deleted file mode 100644 index a7048f43474..00000000000 --- a/services/director/src/simcore_service_director/rest/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. -""" -from .generated_code import ( - models, - util, - routing -) diff --git a/services/director/src/simcore_service_director/rest/generated_code/__init__.py b/services/director/src/simcore_service_director/rest/generated_code/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py b/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py deleted file mode 100644 index c758f7a5a10..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -# import models into model package -from .error_enveloped import ErrorEnveloped -from .health_check_enveloped import HealthCheckEnveloped -from .inline_response200 import InlineResponse200 -from .inline_response2001 import InlineResponse2001 -from .inline_response2001_authors import InlineResponse2001Authors -from .inline_response2001_badges import InlineResponse2001Badges -from 
.inline_response2002 import InlineResponse2002 -from .inline_response2002_data import InlineResponse2002Data -from .inline_response2002_data_container_spec import InlineResponse2002DataContainerSpec -from .inline_response2002_data_node_requirements import InlineResponse2002DataNodeRequirements -from .inline_response2002_data_service_build_details import InlineResponse2002DataServiceBuildDetails -from .inline_response2003 import InlineResponse2003 -from .inline_response2003_data import InlineResponse2003Data -from .inline_response200_data import InlineResponse200Data -from .inline_response201 import InlineResponse201 -from .inline_response_default import InlineResponseDefault -from .inline_response_default_error import InlineResponseDefaultError -from .running_service_enveloped import RunningServiceEnveloped -from .running_services_enveloped import RunningServicesEnveloped -from .service_extras_enveloped import ServiceExtrasEnveloped -from .services_enveloped import ServicesEnveloped -from .simcore_node import SimcoreNode diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py b/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py deleted file mode 100644 index 5d67f4e0a8e..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/base_model_.py +++ /dev/null @@ -1,66 +0,0 @@ -import pprint - -import typing - -from .. import util - -T = typing.TypeVar('T') - - -class Model(object): - # openapiTypes: The key is attribute name and the - # value is attribute type. - openapi_types = {} - - # attributeMap: The key is attribute name and the - # value is json key in definition. 
- attribute_map = {} - - @classmethod - def from_dict(cls: T, dikt: dict) -> T: - """Returns the dict as a model""" - return util.deserialize_model(dikt, cls) - - def to_dict(self) -> dict: - """Returns the model properties as a dict - """ - result = {} - - for attr_key, json_key in self.attribute_map.items(): - value = getattr(self, attr_key) - if value is None: - continue - if isinstance(value, list): - result[json_key] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[json_key] = value.to_dict() - elif isinstance(value, dict): - result[json_key] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[json_key] = value - - return result - - def to_str(self) -> str: - """Returns the string representation of the model - """ - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py deleted file mode 100644 index 80829e28b9e..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/error_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response_default_error import InlineResponseDefaultError -from .. import util - - -class ErrorEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- - Do not edit the class manually. - """ - - def __init__(self, data: object=None, error: InlineResponseDefaultError=None): - """ErrorEnveloped - a model defined in OpenAPI - - :param data: The data of this ErrorEnveloped. - :param error: The error of this ErrorEnveloped. - """ - self.openapi_types = { - 'data': object, - 'error': InlineResponseDefaultError - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ErrorEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ErrorEnveloped of this ErrorEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ErrorEnveloped. - - - :return: The data of this ErrorEnveloped. - :rtype: object - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ErrorEnveloped. - - - :param data: The data of this ErrorEnveloped. - :type data: object - """ - - self._data = data - - @property - def error(self): - """Gets the error of this ErrorEnveloped. - - - :return: The error of this ErrorEnveloped. - :rtype: InlineResponseDefaultError - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ErrorEnveloped. - - - :param error: The error of this ErrorEnveloped. 
- :type error: InlineResponseDefaultError - """ - if error is None: - raise ValueError("Invalid value for `error`, must not be `None`") - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py deleted file mode 100644 index 3906d343690..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/health_check_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response200_data import InlineResponse200Data -from .. import util - - -class HealthCheckEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse200Data=None, error: object=None): - """HealthCheckEnveloped - a model defined in OpenAPI - - :param data: The data of this HealthCheckEnveloped. - :param error: The error of this HealthCheckEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse200Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'HealthCheckEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The HealthCheckEnveloped of this HealthCheckEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this HealthCheckEnveloped. - - - :return: The data of this HealthCheckEnveloped. - :rtype: InlineResponse200Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this HealthCheckEnveloped. - - - :param data: The data of this HealthCheckEnveloped. 
- :type data: InlineResponse200Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this HealthCheckEnveloped. - - - :return: The error of this HealthCheckEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this HealthCheckEnveloped. - - - :param error: The error of this HealthCheckEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py deleted file mode 100644 index 007a500aced..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response200_data import InlineResponse200Data -from .. import util - - -class InlineResponse200(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse200Data=None, error: object=None): - """InlineResponse200 - a model defined in OpenAPI - - :param data: The data of this InlineResponse200. - :param error: The error of this InlineResponse200. - """ - self.openapi_types = { - 'data': InlineResponse200Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse200': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200 of this InlineResponse200. 
- """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse200. - - - :return: The data of this InlineResponse200. - :rtype: InlineResponse200Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse200. - - - :param data: The data of this InlineResponse200. - :type data: InlineResponse200Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse200. - - - :return: The error of this InlineResponse200. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse200. - - - :param error: The error of this InlineResponse200. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py deleted file mode 100644 index 86c64116439..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .simcore_node import SimcoreNode -from .. import util - - -class InlineResponse2001(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[SimcoreNode]=None, error: object=None): - """InlineResponse2001 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2001. - :param error: The error of this InlineResponse2001. 
- """ - self.openapi_types = { - 'data': List[SimcoreNode], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1 of this InlineResponse2001. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2001. - - - :return: The data of this InlineResponse2001. - :rtype: List[SimcoreNode] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2001. - - - :param data: The data of this InlineResponse2001. - :type data: List[SimcoreNode] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2001. - - - :return: The error of this InlineResponse2001. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2001. - - - :param error: The error of this InlineResponse2001. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py deleted file mode 100644 index 2fd9d6d8983..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_authors.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2001Authors(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, email: str=None, affiliation: str=None): - """InlineResponse2001Authors - a model defined in OpenAPI - - :param name: The name of this InlineResponse2001Authors. - :param email: The email of this InlineResponse2001Authors. - :param affiliation: The affiliation of this InlineResponse2001Authors. - """ - self.openapi_types = { - 'name': str, - 'email': str, - 'affiliation': str - } - - self.attribute_map = { - 'name': 'name', - 'email': 'email', - 'affiliation': 'affiliation' - } - - self._name = name - self._email = email - self._affiliation = affiliation - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Authors': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_authors of this InlineResponse2001Authors. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse2001Authors. - - Name of the author - - :return: The name of this InlineResponse2001Authors. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2001Authors. - - Name of the author - - :param name: The name of this InlineResponse2001Authors. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def email(self): - """Gets the email of this InlineResponse2001Authors. - - Email address - - :return: The email of this InlineResponse2001Authors. - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this InlineResponse2001Authors. - - Email address - - :param email: The email of this InlineResponse2001Authors. 
- :type email: str - """ - if email is None: - raise ValueError("Invalid value for `email`, must not be `None`") - - self._email = email - - @property - def affiliation(self): - """Gets the affiliation of this InlineResponse2001Authors. - - Affiliation of the author - - :return: The affiliation of this InlineResponse2001Authors. - :rtype: str - """ - return self._affiliation - - @affiliation.setter - def affiliation(self, affiliation): - """Sets the affiliation of this InlineResponse2001Authors. - - Affiliation of the author - - :param affiliation: The affiliation of this InlineResponse2001Authors. - :type affiliation: str - """ - - self._affiliation = affiliation diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py deleted file mode 100644 index 94121a17f3a..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_badges.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2001Badges(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, image: str=None, url: str=None): - """InlineResponse2001Badges - a model defined in OpenAPI - - :param name: The name of this InlineResponse2001Badges. - :param image: The image of this InlineResponse2001Badges. - :param url: The url of this InlineResponse2001Badges. 
- """ - self.openapi_types = { - 'name': str, - 'image': str, - 'url': str - } - - self.attribute_map = { - 'name': 'name', - 'image': 'image', - 'url': 'url' - } - - self._name = name - self._image = image - self._url = url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Badges': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_badges of this InlineResponse2001Badges. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse2001Badges. - - Name of the subject - - :return: The name of this InlineResponse2001Badges. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2001Badges. - - Name of the subject - - :param name: The name of this InlineResponse2001Badges. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def image(self): - """Gets the image of this InlineResponse2001Badges. - - Url to the shield - - :return: The image of this InlineResponse2001Badges. - :rtype: str - """ - return self._image - - @image.setter - def image(self, image): - """Sets the image of this InlineResponse2001Badges. - - Url to the shield - - :param image: The image of this InlineResponse2001Badges. - :type image: str - """ - if image is None: - raise ValueError("Invalid value for `image`, must not be `None`") - - self._image = image - - @property - def url(self): - """Gets the url of this InlineResponse2001Badges. - - Link to status - - :return: The url of this InlineResponse2001Badges. - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this InlineResponse2001Badges. - - Link to status - - :param url: The url of this InlineResponse2001Badges. 
- :type url: str - """ - if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") - - self._url = url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py deleted file mode 100644 index fcfa3b0bf69..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2001_data.py +++ /dev/null @@ -1,332 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -import re -from .. import util - - -class InlineResponse2001Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, entry_point: str=None, published_port: int=None, service_basepath: str='', service_host: str=None, service_key: str=None, service_message: str=None, service_port: int=None, service_state: str=None, service_uuid: str=None, service_version: str=None): - """InlineResponse2001Data - a model defined in OpenAPI - - :param entry_point: The entry_point of this InlineResponse2001Data. - :param published_port: The published_port of this InlineResponse2001Data. - :param service_basepath: The service_basepath of this InlineResponse2001Data. - :param service_host: The service_host of this InlineResponse2001Data. - :param service_key: The service_key of this InlineResponse2001Data. - :param service_message: The service_message of this InlineResponse2001Data. - :param service_port: The service_port of this InlineResponse2001Data. - :param service_state: The service_state of this InlineResponse2001Data. - :param service_uuid: The service_uuid of this InlineResponse2001Data. - :param service_version: The service_version of this InlineResponse2001Data. 
- """ - self.openapi_types = { - 'entry_point': str, - 'published_port': int, - 'service_basepath': str, - 'service_host': str, - 'service_key': str, - 'service_message': str, - 'service_port': int, - 'service_state': str, - 'service_uuid': str, - 'service_version': str - } - - self.attribute_map = { - 'entry_point': 'entry_point', - 'published_port': 'published_port', - 'service_basepath': 'service_basepath', - 'service_host': 'service_host', - 'service_key': 'service_key', - 'service_message': 'service_message', - 'service_port': 'service_port', - 'service_state': 'service_state', - 'service_uuid': 'service_uuid', - 'service_version': 'service_version' - } - - self._entry_point = entry_point - self._published_port = published_port - self._service_basepath = service_basepath - self._service_host = service_host - self._service_key = service_key - self._service_message = service_message - self._service_port = service_port - self._service_state = service_state - self._service_uuid = service_uuid - self._service_version = service_version - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2001Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_1_data of this InlineResponse2001Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def entry_point(self): - """Gets the entry_point of this InlineResponse2001Data. - - The entry point where the service provides its interface if specified - - :return: The entry_point of this InlineResponse2001Data. - :rtype: str - """ - return self._entry_point - - @entry_point.setter - def entry_point(self, entry_point): - """Sets the entry_point of this InlineResponse2001Data. - - The entry point where the service provides its interface if specified - - :param entry_point: The entry_point of this InlineResponse2001Data. 
- :type entry_point: str - """ - - self._entry_point = entry_point - - @property - def published_port(self): - """Gets the published_port of this InlineResponse2001Data. - - The ports where the service provides its interface - - :return: The published_port of this InlineResponse2001Data. - :rtype: int - """ - return self._published_port - - @published_port.setter - def published_port(self, published_port): - """Sets the published_port of this InlineResponse2001Data. - - The ports where the service provides its interface - - :param published_port: The published_port of this InlineResponse2001Data. - :type published_port: int - """ - if published_port is None: - raise ValueError("Invalid value for `published_port`, must not be `None`") - if published_port is not None and published_port < 1: - raise ValueError("Invalid value for `published_port`, must be a value greater than or equal to `1`") - - self._published_port = published_port - - @property - def service_basepath(self): - """Gets the service_basepath of this InlineResponse2001Data. - - different base path where current service is mounted otherwise defaults to root - - :return: The service_basepath of this InlineResponse2001Data. - :rtype: str - """ - return self._service_basepath - - @service_basepath.setter - def service_basepath(self, service_basepath): - """Sets the service_basepath of this InlineResponse2001Data. - - different base path where current service is mounted otherwise defaults to root - - :param service_basepath: The service_basepath of this InlineResponse2001Data. - :type service_basepath: str - """ - - self._service_basepath = service_basepath - - @property - def service_host(self): - """Gets the service_host of this InlineResponse2001Data. - - service host name within the network - - :return: The service_host of this InlineResponse2001Data. 
- :rtype: str - """ - return self._service_host - - @service_host.setter - def service_host(self, service_host): - """Sets the service_host of this InlineResponse2001Data. - - service host name within the network - - :param service_host: The service_host of this InlineResponse2001Data. - :type service_host: str - """ - if service_host is None: - raise ValueError("Invalid value for `service_host`, must not be `None`") - - self._service_host = service_host - - @property - def service_key(self): - """Gets the service_key of this InlineResponse2001Data. - - distinctive name for the node based on the docker registry path - - :return: The service_key of this InlineResponse2001Data. - :rtype: str - """ - return self._service_key - - @service_key.setter - def service_key(self, service_key): - """Sets the service_key of this InlineResponse2001Data. - - distinctive name for the node based on the docker registry path - - :param service_key: The service_key of this InlineResponse2001Data. - :type service_key: str - """ - if service_key is None: - raise ValueError("Invalid value for `service_key`, must not be `None`") - if service_key is not None and not re.search(r'^(simcore)\/(services)\/(comp|dynamic)(\/[^\s\/]+)+$', service_key): - raise ValueError("Invalid value for `service_key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic)(\/[^\s\/]+)+$/`") - - self._service_key = service_key - - @property - def service_message(self): - """Gets the service_message of this InlineResponse2001Data. - - the service message - - :return: The service_message of this InlineResponse2001Data. - :rtype: str - """ - return self._service_message - - @service_message.setter - def service_message(self, service_message): - """Sets the service_message of this InlineResponse2001Data. - - the service message - - :param service_message: The service_message of this InlineResponse2001Data. 
- :type service_message: str - """ - - self._service_message = service_message - - @property - def service_port(self): - """Gets the service_port of this InlineResponse2001Data. - - port to access the service within the network - - :return: The service_port of this InlineResponse2001Data. - :rtype: int - """ - return self._service_port - - @service_port.setter - def service_port(self, service_port): - """Sets the service_port of this InlineResponse2001Data. - - port to access the service within the network - - :param service_port: The service_port of this InlineResponse2001Data. - :type service_port: int - """ - if service_port is None: - raise ValueError("Invalid value for `service_port`, must not be `None`") - if service_port is not None and service_port < 1: - raise ValueError("Invalid value for `service_port`, must be a value greater than or equal to `1`") - - self._service_port = service_port - - @property - def service_state(self): - """Gets the service_state of this InlineResponse2001Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :return: The service_state of this InlineResponse2001Data. - :rtype: str - """ - return self._service_state - - @service_state.setter - def service_state(self, service_state): - """Sets the service_state of this InlineResponse2001Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :param service_state: The service_state of this InlineResponse2001Data. 
- :type service_state: str - """ - allowed_values = ["pending", "pulling", "starting", "running", "complete", "failed"] - if service_state not in allowed_values: - raise ValueError( - "Invalid value for `service_state` ({0}), must be one of {1}" - .format(service_state, allowed_values) - ) - - self._service_state = service_state - - @property - def service_uuid(self): - """Gets the service_uuid of this InlineResponse2001Data. - - The UUID attached to this service - - :return: The service_uuid of this InlineResponse2001Data. - :rtype: str - """ - return self._service_uuid - - @service_uuid.setter - def service_uuid(self, service_uuid): - """Sets the service_uuid of this InlineResponse2001Data. - - The UUID attached to this service - - :param service_uuid: The service_uuid of this InlineResponse2001Data. - :type service_uuid: str - """ - if service_uuid is None: - raise ValueError("Invalid value for `service_uuid`, must not be `None`") - - self._service_uuid = service_uuid - - @property - def service_version(self): - """Gets the service_version of this InlineResponse2001Data. - - semantic version number - - :return: The service_version of this InlineResponse2001Data. - :rtype: str - """ - return self._service_version - - @service_version.setter - def service_version(self, service_version): - """Sets the service_version of this InlineResponse2001Data. - - semantic version number - - :param service_version: The service_version of this InlineResponse2001Data. 
- :type service_version: str - """ - if service_version is None: - raise ValueError("Invalid value for `service_version`, must not be `None`") - if service_version is not None and not re.search(r'^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$', service_version): - raise ValueError("Invalid value for `service_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`") - - self._service_version = service_version diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py deleted file mode 100644 index ffeb93d434d..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data import InlineResponse2002Data -from .. import util - - -class InlineResponse2002(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2002Data=None, error: object=None): - """InlineResponse2002 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2002. - :param error: The error of this InlineResponse2002. 
- """ - self.openapi_types = { - 'data': InlineResponse2002Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2 of this InlineResponse2002. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2002. - - - :return: The data of this InlineResponse2002. - :rtype: InlineResponse2002Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2002. - - - :param data: The data of this InlineResponse2002. - :type data: InlineResponse2002Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2002. - - - :return: The error of this InlineResponse2002. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2002. - - - :param error: The error of this InlineResponse2002. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py deleted file mode 100644 index 5a6d37c0b68..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_authors.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2002Authors(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, affiliation: str=None, email: str=None, name: str=None): - """InlineResponse2002Authors - a model defined in OpenAPI - - :param affiliation: The affiliation of this InlineResponse2002Authors. - :param email: The email of this InlineResponse2002Authors. - :param name: The name of this InlineResponse2002Authors. - """ - self.openapi_types = { - 'affiliation': str, - 'email': str, - 'name': str - } - - self.attribute_map = { - 'affiliation': 'affiliation', - 'email': 'email', - 'name': 'name' - } - - self._affiliation = affiliation - self._email = email - self._name = name - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Authors': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_authors of this InlineResponse2002Authors. - """ - return util.deserialize_model(dikt, cls) - - @property - def affiliation(self): - """Gets the affiliation of this InlineResponse2002Authors. - - Affiliation of the author - - :return: The affiliation of this InlineResponse2002Authors. - :rtype: str - """ - return self._affiliation - - @affiliation.setter - def affiliation(self, affiliation): - """Sets the affiliation of this InlineResponse2002Authors. - - Affiliation of the author - - :param affiliation: The affiliation of this InlineResponse2002Authors. - :type affiliation: str - """ - - self._affiliation = affiliation - - @property - def email(self): - """Gets the email of this InlineResponse2002Authors. - - Email address - - :return: The email of this InlineResponse2002Authors. - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this InlineResponse2002Authors. - - Email address - - :param email: The email of this InlineResponse2002Authors. 
- :type email: str - """ - if email is None: - raise ValueError("Invalid value for `email`, must not be `None`") - - self._email = email - - @property - def name(self): - """Gets the name of this InlineResponse2002Authors. - - Name of the author - - :return: The name of this InlineResponse2002Authors. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2002Authors. - - Name of the author - - :param name: The name of this InlineResponse2002Authors. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py deleted file mode 100644 index 20fb1cf7741..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_badges.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002Badges(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, image: str=None, name: str=None, url: str=None): - """InlineResponse2002Badges - a model defined in OpenAPI - - :param image: The image of this InlineResponse2002Badges. - :param name: The name of this InlineResponse2002Badges. - :param url: The url of this InlineResponse2002Badges. 
- """ - self.openapi_types = { - 'image': str, - 'name': str, - 'url': str - } - - self.attribute_map = { - 'image': 'image', - 'name': 'name', - 'url': 'url' - } - - self._image = image - self._name = name - self._url = url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Badges': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_badges of this InlineResponse2002Badges. - """ - return util.deserialize_model(dikt, cls) - - @property - def image(self): - """Gets the image of this InlineResponse2002Badges. - - Url to the shield - - :return: The image of this InlineResponse2002Badges. - :rtype: str - """ - return self._image - - @image.setter - def image(self, image): - """Sets the image of this InlineResponse2002Badges. - - Url to the shield - - :param image: The image of this InlineResponse2002Badges. - :type image: str - """ - if image is None: - raise ValueError("Invalid value for `image`, must not be `None`") - - self._image = image - - @property - def name(self): - """Gets the name of this InlineResponse2002Badges. - - Name of the subject - - :return: The name of this InlineResponse2002Badges. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse2002Badges. - - Name of the subject - - :param name: The name of this InlineResponse2002Badges. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def url(self): - """Gets the url of this InlineResponse2002Badges. - - Link to status - - :return: The url of this InlineResponse2002Badges. - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this InlineResponse2002Badges. - - Link to status - - :param url: The url of this InlineResponse2002Badges. 
- :type url: str - """ - if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") - - self._url = url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py deleted file mode 100644 index 2fbc19bdc16..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data.py +++ /dev/null @@ -1,115 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data_container_spec import InlineResponse2002DataContainerSpec -from .inline_response2002_data_node_requirements import InlineResponse2002DataNodeRequirements -from .inline_response2002_data_service_build_details import InlineResponse2002DataServiceBuildDetails -from .. import util - - -class InlineResponse2002Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, node_requirements: InlineResponse2002DataNodeRequirements=None, service_build_details: InlineResponse2002DataServiceBuildDetails=None, container_spec: InlineResponse2002DataContainerSpec=None): - """InlineResponse2002Data - a model defined in OpenAPI - - :param node_requirements: The node_requirements of this InlineResponse2002Data. - :param service_build_details: The service_build_details of this InlineResponse2002Data. - :param container_spec: The container_spec of this InlineResponse2002Data. 
- """ - self.openapi_types = { - 'node_requirements': InlineResponse2002DataNodeRequirements, - 'service_build_details': InlineResponse2002DataServiceBuildDetails, - 'container_spec': InlineResponse2002DataContainerSpec - } - - self.attribute_map = { - 'node_requirements': 'node_requirements', - 'service_build_details': 'service_build_details', - 'container_spec': 'container_spec' - } - - self._node_requirements = node_requirements - self._service_build_details = service_build_details - self._container_spec = container_spec - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data of this InlineResponse2002Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def node_requirements(self): - """Gets the node_requirements of this InlineResponse2002Data. - - - :return: The node_requirements of this InlineResponse2002Data. - :rtype: InlineResponse2002DataNodeRequirements - """ - return self._node_requirements - - @node_requirements.setter - def node_requirements(self, node_requirements): - """Sets the node_requirements of this InlineResponse2002Data. - - - :param node_requirements: The node_requirements of this InlineResponse2002Data. - :type node_requirements: InlineResponse2002DataNodeRequirements - """ - if node_requirements is None: - raise ValueError("Invalid value for `node_requirements`, must not be `None`") - - self._node_requirements = node_requirements - - @property - def service_build_details(self): - """Gets the service_build_details of this InlineResponse2002Data. - - - :return: The service_build_details of this InlineResponse2002Data. - :rtype: InlineResponse2002DataServiceBuildDetails - """ - return self._service_build_details - - @service_build_details.setter - def service_build_details(self, service_build_details): - """Sets the service_build_details of this InlineResponse2002Data. 
- - - :param service_build_details: The service_build_details of this InlineResponse2002Data. - :type service_build_details: InlineResponse2002DataServiceBuildDetails - """ - - self._service_build_details = service_build_details - - @property - def container_spec(self): - """Gets the container_spec of this InlineResponse2002Data. - - - :return: The container_spec of this InlineResponse2002Data. - :rtype: InlineResponse2002DataContainerSpec - """ - return self._container_spec - - @container_spec.setter - def container_spec(self, container_spec): - """Sets the container_spec of this InlineResponse2002Data. - - - :param container_spec: The container_spec of this InlineResponse2002Data. - :type container_spec: InlineResponse2002DataContainerSpec - """ - - self._container_spec = container_spec diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py deleted file mode 100644 index 6309f0fe94a..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_container_spec.py +++ /dev/null @@ -1,60 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002DataContainerSpec(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, command: List[str]=None): - """InlineResponse2002DataContainerSpec - a model defined in OpenAPI - - :param command: The command of this InlineResponse2002DataContainerSpec. 
- """ - self.openapi_types = { - 'command': List[str] - } - - self.attribute_map = { - 'command': 'command' - } - - self._command = command - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataContainerSpec': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_container_spec of this InlineResponse2002DataContainerSpec. - """ - return util.deserialize_model(dikt, cls) - - @property - def command(self): - """Gets the command of this InlineResponse2002DataContainerSpec. - - - :return: The command of this InlineResponse2002DataContainerSpec. - :rtype: List[str] - """ - return self._command - - @command.setter - def command(self, command): - """Sets the command of this InlineResponse2002DataContainerSpec. - - - :param command: The command of this InlineResponse2002DataContainerSpec. - :type command: List[str] - """ - - self._command = command diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py deleted file mode 100644 index 3a1f8bb6e08..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_node_requirements.py +++ /dev/null @@ -1,147 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse2002DataNodeRequirements(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, cpu: float=1, gpu: int=None, ram: int=None, mpi: int=None): - """InlineResponse2002DataNodeRequirements - a model defined in OpenAPI - - :param cpu: The cpu of this InlineResponse2002DataNodeRequirements. 
- :param gpu: The gpu of this InlineResponse2002DataNodeRequirements. - :param ram: The ram of this InlineResponse2002DataNodeRequirements. - :param mpi: The mpi of this InlineResponse2002DataNodeRequirements. - """ - self.openapi_types = { - 'cpu': float, - 'gpu': int, - 'ram': int, - 'mpi': int - } - - self.attribute_map = { - 'cpu': 'CPU', - 'gpu': 'GPU', - 'ram': 'RAM', - 'mpi': 'MPI' - } - - self._cpu = cpu - self._gpu = gpu - self._ram = ram - self._mpi = mpi - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataNodeRequirements': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_node_requirements of this InlineResponse2002DataNodeRequirements. - """ - return util.deserialize_model(dikt, cls) - - @property - def cpu(self): - """Gets the cpu of this InlineResponse2002DataNodeRequirements. - - - :return: The cpu of this InlineResponse2002DataNodeRequirements. - :rtype: float - """ - return self._cpu - - @cpu.setter - def cpu(self, cpu): - """Sets the cpu of this InlineResponse2002DataNodeRequirements. - - - :param cpu: The cpu of this InlineResponse2002DataNodeRequirements. - :type cpu: float - """ - if cpu is None: - raise ValueError("Invalid value for `cpu`, must not be `None`") - if cpu is not None and cpu < 1: - raise ValueError("Invalid value for `cpu`, must be a value greater than or equal to `1`") - - self._cpu = cpu - - @property - def gpu(self): - """Gets the gpu of this InlineResponse2002DataNodeRequirements. - - - :return: The gpu of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._gpu - - @gpu.setter - def gpu(self, gpu): - """Sets the gpu of this InlineResponse2002DataNodeRequirements. - - - :param gpu: The gpu of this InlineResponse2002DataNodeRequirements. 
- :type gpu: int - """ - if gpu is not None and gpu < 0: - raise ValueError("Invalid value for `gpu`, must be a value greater than or equal to `0`") - - self._gpu = gpu - - @property - def ram(self): - """Gets the ram of this InlineResponse2002DataNodeRequirements. - - - :return: The ram of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._ram - - @ram.setter - def ram(self, ram): - """Sets the ram of this InlineResponse2002DataNodeRequirements. - - - :param ram: The ram of this InlineResponse2002DataNodeRequirements. - :type ram: int - """ - if ram is None: - raise ValueError("Invalid value for `ram`, must not be `None`") - if ram is not None and ram < 1024: - raise ValueError("Invalid value for `ram`, must be a value greater than or equal to `1024`") - - self._ram = ram - - @property - def mpi(self): - """Gets the mpi of this InlineResponse2002DataNodeRequirements. - - - :return: The mpi of this InlineResponse2002DataNodeRequirements. - :rtype: int - """ - return self._mpi - - @mpi.setter - def mpi(self, mpi): - """Sets the mpi of this InlineResponse2002DataNodeRequirements. - - - :param mpi: The mpi of this InlineResponse2002DataNodeRequirements. - :type mpi: int - """ - if mpi is not None and mpi > 1: - raise ValueError("Invalid value for `mpi`, must be a value less than or equal to `1`") - - self._mpi = mpi diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py deleted file mode 100644 index 35ab8473235..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2002_data_service_build_details.py +++ /dev/null @@ -1,110 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. 
import util - - -class InlineResponse2002DataServiceBuildDetails(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, build_date: str=None, vcs_ref: str=None, vcs_url: str=None): - """InlineResponse2002DataServiceBuildDetails - a model defined in OpenAPI - - :param build_date: The build_date of this InlineResponse2002DataServiceBuildDetails. - :param vcs_ref: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. - :param vcs_url: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - """ - self.openapi_types = { - 'build_date': str, - 'vcs_ref': str, - 'vcs_url': str - } - - self.attribute_map = { - 'build_date': 'build_date', - 'vcs_ref': 'vcs_ref', - 'vcs_url': 'vcs_url' - } - - self._build_date = build_date - self._vcs_ref = vcs_ref - self._vcs_url = vcs_url - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2002DataServiceBuildDetails': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_2_data_service_build_details of this InlineResponse2002DataServiceBuildDetails. - """ - return util.deserialize_model(dikt, cls) - - @property - def build_date(self): - """Gets the build_date of this InlineResponse2002DataServiceBuildDetails. - - - :return: The build_date of this InlineResponse2002DataServiceBuildDetails. - :rtype: str - """ - return self._build_date - - @build_date.setter - def build_date(self, build_date): - """Sets the build_date of this InlineResponse2002DataServiceBuildDetails. - - - :param build_date: The build_date of this InlineResponse2002DataServiceBuildDetails. - :type build_date: str - """ - - self._build_date = build_date - - @property - def vcs_ref(self): - """Gets the vcs_ref of this InlineResponse2002DataServiceBuildDetails. - - - :return: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. 
- :rtype: str - """ - return self._vcs_ref - - @vcs_ref.setter - def vcs_ref(self, vcs_ref): - """Sets the vcs_ref of this InlineResponse2002DataServiceBuildDetails. - - - :param vcs_ref: The vcs_ref of this InlineResponse2002DataServiceBuildDetails. - :type vcs_ref: str - """ - - self._vcs_ref = vcs_ref - - @property - def vcs_url(self): - """Gets the vcs_url of this InlineResponse2002DataServiceBuildDetails. - - - :return: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - :rtype: str - """ - return self._vcs_url - - @vcs_url.setter - def vcs_url(self, vcs_url): - """Sets the vcs_url of this InlineResponse2002DataServiceBuildDetails. - - - :param vcs_url: The vcs_url of this InlineResponse2002DataServiceBuildDetails. - :type vcs_url: str - """ - - self._vcs_url = vcs_url diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py deleted file mode 100644 index 3c527146f9d..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class InlineResponse2003(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[InlineResponse2003Data]=None, error: object=None): - """InlineResponse2003 - a model defined in OpenAPI - - :param data: The data of this InlineResponse2003. - :param error: The error of this InlineResponse2003. 
- """ - self.openapi_types = { - 'data': List[InlineResponse2003Data], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse2003': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_3 of this InlineResponse2003. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse2003. - - - :return: The data of this InlineResponse2003. - :rtype: List[InlineResponse2003Data] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse2003. - - - :param data: The data of this InlineResponse2003. - :type data: List[InlineResponse2003Data] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse2003. - - - :return: The error of this InlineResponse2003. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse2003. - - - :param error: The error of this InlineResponse2003. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py deleted file mode 100644 index bb3c3804dda..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response2003_data.py +++ /dev/null @@ -1,399 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -import re -from .. 
import util - - -class InlineResponse2003Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__( - self, - published_port: int = None, - entry_point: str = None, - service_uuid: str = None, - service_key: str = None, - service_version: str = None, - service_host: str = None, - service_port: int = None, - service_basepath: str = "", - service_state: str = None, - service_message: str = None, - user_id: str = None, - ): - """InlineResponse2003Data - a model defined in OpenAPI - - :param published_port: The published_port of this InlineResponse2003Data. - :param entry_point: The entry_point of this InlineResponse2003Data. - :param service_uuid: The service_uuid of this InlineResponse2003Data. - :param service_key: The service_key of this InlineResponse2003Data. - :param service_version: The service_version of this InlineResponse2003Data. - :param service_host: The service_host of this InlineResponse2003Data. - :param service_port: The service_port of this InlineResponse2003Data. - :param service_basepath: The service_basepath of this InlineResponse2003Data. - :param service_state: The service_state of this InlineResponse2003Data. - :param service_message: The service_message of this InlineResponse2003Data. - :param user_id: The user_id of this InlineResponse2003Data. 
- """ - self.openapi_types = { - "published_port": int, - "entry_point": str, - "service_uuid": str, - "service_key": str, - "service_version": str, - "service_host": str, - "service_port": int, - "service_basepath": str, - "service_state": str, - "service_message": str, - "user_id": str, - } - - self.attribute_map = { - "published_port": "published_port", - "entry_point": "entry_point", - "service_uuid": "service_uuid", - "service_key": "service_key", - "service_version": "service_version", - "service_host": "service_host", - "service_port": "service_port", - "service_basepath": "service_basepath", - "service_state": "service_state", - "service_message": "service_message", - "user_id": "user_id", - } - - self._published_port = published_port - self._entry_point = entry_point - self._service_uuid = service_uuid - self._service_key = service_key - self._service_version = service_version - self._service_host = service_host - self._service_port = service_port - self._service_basepath = service_basepath - self._service_state = service_state - self._service_message = service_message - self._user_id = user_id - - @classmethod - def from_dict(cls, dikt: dict) -> "InlineResponse2003Data": - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_3_data of this InlineResponse2003Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def published_port(self): - """Gets the published_port of this InlineResponse2003Data. - - The ports where the service provides its interface - - :return: The published_port of this InlineResponse2003Data. - :rtype: int - """ - return self._published_port - - @published_port.setter - def published_port(self, published_port): - """Sets the published_port of this InlineResponse2003Data. - - The ports where the service provides its interface - - :param published_port: The published_port of this InlineResponse2003Data. 
- :type published_port: int - """ - if published_port is None: - raise ValueError("Invalid value for `published_port`, must not be `None`") - if published_port is not None and published_port < 1: - raise ValueError( - "Invalid value for `published_port`, must be a value greater than or equal to `1`" - ) - - self._published_port = published_port - - @property - def entry_point(self): - """Gets the entry_point of this InlineResponse2003Data. - - The entry point where the service provides its interface if specified - - :return: The entry_point of this InlineResponse2003Data. - :rtype: str - """ - return self._entry_point - - @entry_point.setter - def entry_point(self, entry_point): - """Sets the entry_point of this InlineResponse2003Data. - - The entry point where the service provides its interface if specified - - :param entry_point: The entry_point of this InlineResponse2003Data. - :type entry_point: str - """ - - self._entry_point = entry_point - - @property - def service_uuid(self): - """Gets the service_uuid of this InlineResponse2003Data. - - The UUID attached to this service - - :return: The service_uuid of this InlineResponse2003Data. - :rtype: str - """ - return self._service_uuid - - @service_uuid.setter - def service_uuid(self, service_uuid): - """Sets the service_uuid of this InlineResponse2003Data. - - The UUID attached to this service - - :param service_uuid: The service_uuid of this InlineResponse2003Data. - :type service_uuid: str - """ - if service_uuid is None: - raise ValueError("Invalid value for `service_uuid`, must not be `None`") - - self._service_uuid = service_uuid - - @property - def service_key(self): - """Gets the service_key of this InlineResponse2003Data. - - distinctive name for the node based on the docker registry path - - :return: The service_key of this InlineResponse2003Data. 
- :rtype: str - """ - return self._service_key - - @service_key.setter - def service_key(self, service_key): - """Sets the service_key of this InlineResponse2003Data. - - distinctive name for the node based on the docker registry path - - :param service_key: The service_key of this InlineResponse2003Data. - :type service_key: str - """ - if service_key is None: - raise ValueError("Invalid value for `service_key`, must not be `None`") - if service_key is not None and not re.search( - r"^simcore/services/" - r"(?P(comp|dynamic|frontend))/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$", - service_key, - ): - raise ValueError( - r"Invalid value for `service_key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic)(\/[\w\/-]+)+$/`" - ) - - self._service_key = service_key - - @property - def service_version(self): - """Gets the service_version of this InlineResponse2003Data. - - semantic version number - - :return: The service_version of this InlineResponse2003Data. - :rtype: str - """ - return self._service_version - - @service_version.setter - def service_version(self, service_version): - """Sets the service_version of this InlineResponse2003Data. - - semantic version number - - :param service_version: The service_version of this InlineResponse2003Data. 
- :type service_version: str - """ - if service_version is None: - raise ValueError("Invalid value for `service_version`, must not be `None`") - if service_version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[\da-zA-Z-]+)*)?$", - service_version, - ): - raise ValueError( - r"Invalid value for `service_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._service_version = service_version - - @property - def service_host(self): - """Gets the service_host of this InlineResponse2003Data. - - service host name within the network - - :return: The service_host of this InlineResponse2003Data. - :rtype: str - """ - return self._service_host - - @service_host.setter - def service_host(self, service_host): - """Sets the service_host of this InlineResponse2003Data. - - service host name within the network - - :param service_host: The service_host of this InlineResponse2003Data. - :type service_host: str - """ - if service_host is None: - raise ValueError("Invalid value for `service_host`, must not be `None`") - - self._service_host = service_host - - @property - def service_port(self): - """Gets the service_port of this InlineResponse2003Data. - - port to access the service within the network - - :return: The service_port of this InlineResponse2003Data. - :rtype: int - """ - return self._service_port - - @service_port.setter - def service_port(self, service_port): - """Sets the service_port of this InlineResponse2003Data. - - port to access the service within the network - - :param service_port: The service_port of this InlineResponse2003Data. 
- :type service_port: int - """ - if service_port is None: - raise ValueError("Invalid value for `service_port`, must not be `None`") - if service_port is not None and service_port < 1: - raise ValueError( - "Invalid value for `service_port`, must be a value greater than or equal to `1`" - ) - - self._service_port = service_port - - @property - def service_basepath(self): - """Gets the service_basepath of this InlineResponse2003Data. - - different base path where current service is mounted otherwise defaults to root - - :return: The service_basepath of this InlineResponse2003Data. - :rtype: str - """ - return self._service_basepath - - @service_basepath.setter - def service_basepath(self, service_basepath): - """Sets the service_basepath of this InlineResponse2003Data. - - different base path where current service is mounted otherwise defaults to root - - :param service_basepath: The service_basepath of this InlineResponse2003Data. - :type service_basepath: str - """ - - self._service_basepath = service_basepath - - @property - def service_state(self): - """Gets the service_state of this InlineResponse2003Data. - - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :return: The service_state of this InlineResponse2003Data. - :rtype: str - """ - return self._service_state - - @service_state.setter - def service_state(self, service_state): - """Sets the service_state of this InlineResponse2003Data. 
- - the service state * 'pending' - The service is waiting for resources to start * 'pulling' - The service is being pulled from the registry * 'starting' - The service is starting * 'running' - The service is running * 'complete' - The service completed * 'failed' - The service failed to start - - :param service_state: The service_state of this InlineResponse2003Data. - :type service_state: str - """ - allowed_values = [ - "pending", - "pulling", - "starting", - "running", - "complete", - "failed", - ] - if service_state not in allowed_values: - raise ValueError( - "Invalid value for `service_state` ({0}), must be one of {1}".format( - service_state, allowed_values - ) - ) - - self._service_state = service_state - - @property - def service_message(self): - """Gets the service_message of this InlineResponse2003Data. - - the service message - - :return: The service_message of this InlineResponse2003Data. - :rtype: str - """ - return self._service_message - - @service_message.setter - def service_message(self, service_message): - """Sets the service_message of this InlineResponse2003Data. - - the service message - - :param service_message: The service_message of this InlineResponse2003Data. - :type service_message: str - """ - - self._service_message = service_message - - @property - def user_id(self): - """Gets the user_id of this InlineResponse2003Data. - - the user that started the service - - :return: The user_id of this InlineResponse2003Data. - :rtype: str - """ - return self._user_id - - @user_id.setter - def user_id(self, user_id): - """Sets the user_id of this InlineResponse2003Data. - - the user that started the service - - :param user_id: The user_id of this InlineResponse2003Data. 
- :type user_id: str - """ - if user_id is None: - raise ValueError("Invalid value for `user_id`, must not be `None`") - - self._user_id = user_id diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py deleted file mode 100644 index 1cc495dbbbd..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response200_data.py +++ /dev/null @@ -1,135 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponse200Data(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, name: str=None, status: str=None, api_version: str=None, version: str=None): - """InlineResponse200Data - a model defined in OpenAPI - - :param name: The name of this InlineResponse200Data. - :param status: The status of this InlineResponse200Data. - :param api_version: The api_version of this InlineResponse200Data. - :param version: The version of this InlineResponse200Data. - """ - self.openapi_types = { - 'name': str, - 'status': str, - 'api_version': str, - 'version': str - } - - self.attribute_map = { - 'name': 'name', - 'status': 'status', - 'api_version': 'api_version', - 'version': 'version' - } - - self._name = name - self._status = status - self._api_version = api_version - self._version = version - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse200Data': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_200_data of this InlineResponse200Data. - """ - return util.deserialize_model(dikt, cls) - - @property - def name(self): - """Gets the name of this InlineResponse200Data. 
- - - :return: The name of this InlineResponse200Data. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this InlineResponse200Data. - - - :param name: The name of this InlineResponse200Data. - :type name: str - """ - - self._name = name - - @property - def status(self): - """Gets the status of this InlineResponse200Data. - - - :return: The status of this InlineResponse200Data. - :rtype: str - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this InlineResponse200Data. - - - :param status: The status of this InlineResponse200Data. - :type status: str - """ - - self._status = status - - @property - def api_version(self): - """Gets the api_version of this InlineResponse200Data. - - - :return: The api_version of this InlineResponse200Data. - :rtype: str - """ - return self._api_version - - @api_version.setter - def api_version(self, api_version): - """Sets the api_version of this InlineResponse200Data. - - - :param api_version: The api_version of this InlineResponse200Data. - :type api_version: str - """ - - self._api_version = api_version - - @property - def version(self): - """Gets the version of this InlineResponse200Data. - - - :return: The version of this InlineResponse200Data. - :rtype: str - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this InlineResponse200Data. - - - :param version: The version of this InlineResponse200Data. 
- :type version: str - """ - - self._version = version diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py deleted file mode 100644 index 221a60352b6..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response201.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class InlineResponse201(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2003Data=None, error: object=None): - """InlineResponse201 - a model defined in OpenAPI - - :param data: The data of this InlineResponse201. - :param error: The error of this InlineResponse201. - """ - self.openapi_types = { - 'data': InlineResponse2003Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponse201': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_201 of this InlineResponse201. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponse201. - - - :return: The data of this InlineResponse201. - :rtype: InlineResponse2003Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponse201. - - - :param data: The data of this InlineResponse201. 
- :type data: InlineResponse2003Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponse201. - - - :return: The error of this InlineResponse201. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponse201. - - - :param error: The error of this InlineResponse201. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py deleted file mode 100644 index 3dd0b09b399..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response_default_error import InlineResponseDefaultError -from .. import util - - -class InlineResponseDefault(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: object=None, error: InlineResponseDefaultError=None): - """InlineResponseDefault - a model defined in OpenAPI - - :param data: The data of this InlineResponseDefault. - :param error: The error of this InlineResponseDefault. - """ - self.openapi_types = { - 'data': object, - 'error': InlineResponseDefaultError - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponseDefault': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_default of this InlineResponseDefault. 
- """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this InlineResponseDefault. - - - :return: The data of this InlineResponseDefault. - :rtype: object - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this InlineResponseDefault. - - - :param data: The data of this InlineResponseDefault. - :type data: object - """ - - self._data = data - - @property - def error(self): - """Gets the error of this InlineResponseDefault. - - - :return: The error of this InlineResponseDefault. - :rtype: InlineResponseDefaultError - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this InlineResponseDefault. - - - :param error: The error of this InlineResponseDefault. - :type error: InlineResponseDefaultError - """ - if error is None: - raise ValueError("Invalid value for `error`, must not be `None`") - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py b/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py deleted file mode 100644 index 95b5cf26175..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/inline_response_default_error.py +++ /dev/null @@ -1,118 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .. import util - - -class InlineResponseDefaultError(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, message: str=None, errors: List[object]=None, status: int=None): - """InlineResponseDefaultError - a model defined in OpenAPI - - :param message: The message of this InlineResponseDefaultError. - :param errors: The errors of this InlineResponseDefaultError. 
- :param status: The status of this InlineResponseDefaultError. - """ - self.openapi_types = { - 'message': str, - 'errors': List[object], - 'status': int - } - - self.attribute_map = { - 'message': 'message', - 'errors': 'errors', - 'status': 'status' - } - - self._message = message - self._errors = errors - self._status = status - - @classmethod - def from_dict(cls, dikt: dict) -> 'InlineResponseDefaultError': - """Returns the dict as a model - - :param dikt: A dict. - :return: The inline_response_default_error of this InlineResponseDefaultError. - """ - return util.deserialize_model(dikt, cls) - - @property - def message(self): - """Gets the message of this InlineResponseDefaultError. - - Error message - - :return: The message of this InlineResponseDefaultError. - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this InlineResponseDefaultError. - - Error message - - :param message: The message of this InlineResponseDefaultError. - :type message: str - """ - if message is None: - raise ValueError("Invalid value for `message`, must not be `None`") - - self._message = message - - @property - def errors(self): - """Gets the errors of this InlineResponseDefaultError. - - - :return: The errors of this InlineResponseDefaultError. - :rtype: List[object] - """ - return self._errors - - @errors.setter - def errors(self, errors): - """Sets the errors of this InlineResponseDefaultError. - - - :param errors: The errors of this InlineResponseDefaultError. - :type errors: List[object] - """ - - self._errors = errors - - @property - def status(self): - """Gets the status of this InlineResponseDefaultError. - - Error code - - :return: The status of this InlineResponseDefaultError. - :rtype: int - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this InlineResponseDefaultError. - - Error code - - :param status: The status of this InlineResponseDefaultError. 
- :type status: int - """ - if status is None: - raise ValueError("Invalid value for `status`, must not be `None`") - - self._status = status diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py deleted file mode 100644 index 2075fb9fd91..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/running_service_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class RunningServiceEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2003Data=None, error: object=None): - """RunningServiceEnveloped - a model defined in OpenAPI - - :param data: The data of this RunningServiceEnveloped. - :param error: The error of this RunningServiceEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse2003Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'RunningServiceEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The RunningServiceEnveloped of this RunningServiceEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this RunningServiceEnveloped. - - - :return: The data of this RunningServiceEnveloped. - :rtype: InlineResponse2003Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this RunningServiceEnveloped. 
- - - :param data: The data of this RunningServiceEnveloped. - :type data: InlineResponse2003Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this RunningServiceEnveloped. - - - :return: The error of this RunningServiceEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this RunningServiceEnveloped. - - - :param error: The error of this RunningServiceEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py deleted file mode 100644 index 104508d8de4..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/running_services_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2003_data import InlineResponse2003Data -from .. import util - - -class RunningServicesEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[InlineResponse2003Data]=None, error: object=None): - """RunningServicesEnveloped - a model defined in OpenAPI - - :param data: The data of this RunningServicesEnveloped. - :param error: The error of this RunningServicesEnveloped. 
- """ - self.openapi_types = { - 'data': List[InlineResponse2003Data], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'RunningServicesEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The RunningServicesEnveloped of this RunningServicesEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this RunningServicesEnveloped. - - - :return: The data of this RunningServicesEnveloped. - :rtype: List[InlineResponse2003Data] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this RunningServicesEnveloped. - - - :param data: The data of this RunningServicesEnveloped. - :type data: List[InlineResponse2003Data] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this RunningServicesEnveloped. - - - :return: The error of this RunningServicesEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this RunningServicesEnveloped. - - - :param error: The error of this RunningServicesEnveloped. 
- :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py deleted file mode 100644 index dcb444a8725..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/service_extras_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2002_data import InlineResponse2002Data -from .. import util - - -class ServiceExtrasEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: InlineResponse2002Data=None, error: object=None): - """ServiceExtrasEnveloped - a model defined in OpenAPI - - :param data: The data of this ServiceExtrasEnveloped. - :param error: The error of this ServiceExtrasEnveloped. - """ - self.openapi_types = { - 'data': InlineResponse2002Data, - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ServiceExtrasEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ServiceExtrasEnveloped of this ServiceExtrasEnveloped. - """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ServiceExtrasEnveloped. - - - :return: The data of this ServiceExtrasEnveloped. - :rtype: InlineResponse2002Data - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ServiceExtrasEnveloped. - - - :param data: The data of this ServiceExtrasEnveloped. 
- :type data: InlineResponse2002Data - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this ServiceExtrasEnveloped. - - - :return: The error of this ServiceExtrasEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ServiceExtrasEnveloped. - - - :param error: The error of this ServiceExtrasEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py b/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py deleted file mode 100644 index b101b17ecb8..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/services_enveloped.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .simcore_node import SimcoreNode -from .. import util - - -class ServicesEnveloped(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. - """ - - def __init__(self, data: List[SimcoreNode]=None, error: object=None): - """ServicesEnveloped - a model defined in OpenAPI - - :param data: The data of this ServicesEnveloped. - :param error: The error of this ServicesEnveloped. - """ - self.openapi_types = { - 'data': List[SimcoreNode], - 'error': object - } - - self.attribute_map = { - 'data': 'data', - 'error': 'error' - } - - self._data = data - self._error = error - - @classmethod - def from_dict(cls, dikt: dict) -> 'ServicesEnveloped': - """Returns the dict as a model - - :param dikt: A dict. - :return: The ServicesEnveloped of this ServicesEnveloped. 
- """ - return util.deserialize_model(dikt, cls) - - @property - def data(self): - """Gets the data of this ServicesEnveloped. - - - :return: The data of this ServicesEnveloped. - :rtype: List[SimcoreNode] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this ServicesEnveloped. - - - :param data: The data of this ServicesEnveloped. - :type data: List[SimcoreNode] - """ - if data is None: - raise ValueError("Invalid value for `data`, must not be `None`") - - self._data = data - - @property - def error(self): - """Gets the error of this ServicesEnveloped. - - - :return: The error of this ServicesEnveloped. - :rtype: object - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ServicesEnveloped. - - - :param error: The error of this ServicesEnveloped. - :type error: object - """ - - self._error = error diff --git a/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py b/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py deleted file mode 100644 index 2601375f1b8..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/models/simcore_node.py +++ /dev/null @@ -1,447 +0,0 @@ -# coding: utf-8 - -from datetime import date, datetime - -from typing import List, Dict, Type - -from .base_model_ import Model -from .inline_response2001_authors import InlineResponse2001Authors -from .inline_response2001_badges import InlineResponse2001Badges -import re -from .. import util - - -class SimcoreNode(Model): - """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - - Do not edit the class manually. 
- """ - - def __init__( - self, - key: str = None, - integration_version: str = None, - version: str = None, - type: str = None, - name: str = None, - thumbnail: str = None, - badges: List[InlineResponse2001Badges] = None, - description: str = None, - authors: List[InlineResponse2001Authors] = None, - contact: str = None, - inputs: Dict[str, object] = None, - outputs: Dict[str, object] = None, - boot_options: Dict[str, object] = None, - ): - """SimcoreNode - a model defined in OpenAPI - - :param key: The key of this SimcoreNode. - :param integration_version: The integration_version of this SimcoreNode. - :param version: The version of this SimcoreNode. - :param type: The type of this SimcoreNode. - :param name: The name of this SimcoreNode. - :param thumbnail: The thumbnail of this SimcoreNode. - :param badges: The badges of this SimcoreNode. - :param description: The description of this SimcoreNode. - :param authors: The authors of this SimcoreNode. - :param contact: The contact of this SimcoreNode. - :param inputs: The inputs of this SimcoreNode. - :param outputs: The outputs of this SimcoreNode. - :param boot_options: The boot_options of this SimcoreNode. 
- """ - self.openapi_types = { - "key": str, - "integration_version": str, - "version": str, - "type": str, - "name": str, - "thumbnail": str, - "badges": List[InlineResponse2001Badges], - "description": str, - "authors": List[InlineResponse2001Authors], - "contact": str, - "inputs": Dict[str, object], - "outputs": Dict[str, object], - "boot_options": Dict[str, object], - } - - self.attribute_map = { - "key": "key", - "integration_version": "integration-version", - "version": "version", - "type": "type", - "name": "name", - "thumbnail": "thumbnail", - "badges": "badges", - "description": "description", - "authors": "authors", - "contact": "contact", - "inputs": "inputs", - "outputs": "outputs", - "boot_options": "boot-options", - } - - self._key = key - self._integration_version = integration_version - self._version = version - self._type = type - self._name = name - self._thumbnail = thumbnail - self._badges = badges - self._description = description - self._authors = authors - self._contact = contact - self._inputs = inputs - self._outputs = outputs - self._boot_options = boot_options - - @classmethod - def from_dict(cls, dikt: dict) -> "SimcoreNode": - """Returns the dict as a model - - :param dikt: A dict. - :return: The simcore_node of this SimcoreNode. - """ - return util.deserialize_model(dikt, cls) - - @property - def key(self): - """Gets the key of this SimcoreNode. - - distinctive name for the node based on the docker registry path - - :return: The key of this SimcoreNode. - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this SimcoreNode. - - distinctive name for the node based on the docker registry path - - :param key: The key of this SimcoreNode. 
- :type key: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") - if key is not None and not re.search( - r"^simcore/services/" - r"(?P(comp|dynamic|frontend))/" - r"(?P[a-z0-9][a-z0-9_.-]*/)*" - r"(?P[a-z0-9-_]+[a-z0-9])$", - key, - ): - raise ValueError( - r"Invalid value for `key`, must be a follow pattern or equal to `/^(simcore)\/(services)\/(comp|dynamic|frontend)(\/[\w\/-]+)+$/`" - ) - - self._key = key - - @property - def integration_version(self): - """Gets the integration_version of this SimcoreNode. - - integration version number - - :return: The integration_version of this SimcoreNode. - :rtype: str - """ - return self._integration_version - - @integration_version.setter - def integration_version(self, integration_version): - """Sets the integration_version of this SimcoreNode. - - integration version number - - :param integration_version: The integration_version of this SimcoreNode. - :type integration_version: str - """ - if integration_version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$", - integration_version, - ): - raise ValueError( - r"Invalid value for `integration_version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._integration_version = integration_version - - @property - def version(self): - """Gets the version of this SimcoreNode. - - service version number - - :return: The version of this SimcoreNode. - :rtype: str - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this SimcoreNode. - - service version number - - :param version: The version of this SimcoreNode. 
- :type version: str - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") - if version is not None and not re.search( - r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$", - version, - ): - raise ValueError( - r"Invalid value for `version`, must be a follow pattern or equal to `/^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$/`" - ) - - self._version = version - - @property - def type(self): - """Gets the type of this SimcoreNode. - - service type - - :return: The type of this SimcoreNode. - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this SimcoreNode. - - service type - - :param type: The type of this SimcoreNode. - :type type: str - """ - allowed_values = ["frontend", "computational", "dynamic"] - if type not in allowed_values: - raise ValueError( - "Invalid value for `type` ({0}), must be one of {1}".format( - type, allowed_values - ) - ) - - self._type = type - - @property - def name(self): - """Gets the name of this SimcoreNode. - - short, human readable name for the node - - :return: The name of this SimcoreNode. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this SimcoreNode. - - short, human readable name for the node - - :param name: The name of this SimcoreNode. - :type name: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") - - self._name = name - - @property - def thumbnail(self): - """Gets the thumbnail of this SimcoreNode. - - url to the thumbnail - - :return: The thumbnail of this SimcoreNode. - :rtype: str - """ - return self._thumbnail - - @thumbnail.setter - def thumbnail(self, thumbnail): - """Sets the thumbnail of this SimcoreNode. 
- - url to the thumbnail - - :param thumbnail: The thumbnail of this SimcoreNode. - :type thumbnail: str - """ - - self._thumbnail = thumbnail - - @property - def badges(self): - """Gets the badges of this SimcoreNode. - - - :return: The badges of this SimcoreNode. - :rtype: List[InlineResponse2001Badges] - """ - return self._badges - - @badges.setter - def badges(self, badges): - """Sets the badges of this SimcoreNode. - - - :param badges: The badges of this SimcoreNode. - :type badges: List[InlineResponse2001Badges] - """ - - self._badges = badges - - @property - def description(self): - """Gets the description of this SimcoreNode. - - human readable description of the purpose of the node - - :return: The description of this SimcoreNode. - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this SimcoreNode. - - human readable description of the purpose of the node - - :param description: The description of this SimcoreNode. - :type description: str - """ - if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") - - self._description = description - - @property - def authors(self): - """Gets the authors of this SimcoreNode. - - - :return: The authors of this SimcoreNode. - :rtype: List[InlineResponse2001Authors] - """ - return self._authors - - @authors.setter - def authors(self, authors): - """Sets the authors of this SimcoreNode. - - - :param authors: The authors of this SimcoreNode. - :type authors: List[InlineResponse2001Authors] - """ - if authors is None: - raise ValueError("Invalid value for `authors`, must not be `None`") - - self._authors = authors - - @property - def contact(self): - """Gets the contact of this SimcoreNode. - - email to correspond to the authors about the node - - :return: The contact of this SimcoreNode. 
- :rtype: str - """ - return self._contact - - @contact.setter - def contact(self, contact): - """Sets the contact of this SimcoreNode. - - email to correspond to the authors about the node - - :param contact: The contact of this SimcoreNode. - :type contact: str - """ - if contact is None: - raise ValueError("Invalid value for `contact`, must not be `None`") - - self._contact = contact - - @property - def inputs(self): - """Gets the inputs of this SimcoreNode. - - definition of the inputs of this node - - :return: The inputs of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._inputs - - @inputs.setter - def inputs(self, inputs): - """Sets the inputs of this SimcoreNode. - - definition of the inputs of this node - - :param inputs: The inputs of this SimcoreNode. - :type inputs: Dict[str, object] - """ - if inputs is None: - raise ValueError("Invalid value for `inputs`, must not be `None`") - - self._inputs = inputs - - @property - def outputs(self): - """Gets the outputs of this SimcoreNode. - - definition of the outputs of this node - - :return: The outputs of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._outputs - - @outputs.setter - def outputs(self, outputs): - """Sets the outputs of this SimcoreNode. - - definition of the outputs of this node - - :param outputs: The outputs of this SimcoreNode. - :type outputs: Dict[str, object] - """ - if outputs is None: - raise ValueError("Invalid value for `outputs`, must not be `None`") - - self._outputs = outputs - - @property - def boot_options(self): - """Gets the boot_options of this SimcoreNode. - - Service defined boot options. These get injected in the service as env variables. - - :return: The boot_options of this SimcoreNode. - :rtype: Dict[str, object] - """ - return self._boot_options - - @boot_options.setter - def boot_options(self, boot_options): - """Sets the boot_options of this SimcoreNode. - - Service defined boot options. 
These get injected in the service as env variables. - - :param boot_options: The boot_options of this SimcoreNode. - :type boot_options: Dict[str, object] - """ - - self._boot_options = boot_options diff --git a/services/director/src/simcore_service_director/rest/generated_code/routing.py b/services/director/src/simcore_service_director/rest/generated_code/routing.py deleted file mode 100644 index 3cf2a4d57b7..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/routing.py +++ /dev/null @@ -1,94 +0,0 @@ -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. - -use create_web_app to initialise the web application using the specification file. -The base folder is the root of the package. -""" - - -import logging -from pathlib import Path - -from aiohttp import hdrs, web -from aiohttp_apiset import SwaggerRouter -from aiohttp_apiset.exceptions import ValidationError -from aiohttp_apiset.middlewares import Jsonify, jsonify -from aiohttp_apiset.swagger.loader import ExtendedSchemaFile -from aiohttp_apiset.swagger.operations import OperationIdMapping - -from .. 
import handlers -from .models.base_model_ import Model - -log = logging.getLogger(__name__) - -@web.middleware -async def __handle_errors(request, handler): - try: - log.debug("error middleware handling request %s to handler %s", request, handler) - response = await handler(request) - return response - except ValidationError as ex: - # aiohttp apiset errors - log.exception("error happened in handling route") - error = dict(status=ex.status, message=ex.to_tree()) - error_enveloped = dict(error=error) - return web.json_response(error_enveloped, status=ex.status) - except web.HTTPError as ex: - log.exception("error happened in handling route") - error = dict(status=ex.status, message=str(ex.reason)) - error_enveloped = dict(data=error) - return web.json_response(error_enveloped, status=ex.status) - - -def create_web_app(base_folder, spec_file, additional_middlewares = None): - # create the default mapping of the operationId to the implementation code in handlers - opmap = __create_default_operation_mapping(Path(base_folder / spec_file)) - - # generate a version 3 of the API documentation - router = SwaggerRouter( - swagger_ui='/apidoc/', - version_ui=3, # forces the use of version 3 by default - search_dirs=[base_folder], - default_validate=True, - ) - - # add automatic jsonification of the models located in generated code - jsonify.singleton = Jsonify(indent=3, ensure_ascii=False) - jsonify.singleton.add_converter(Model, lambda o: o.to_dict(), score=0) - - middlewares = [jsonify, __handle_errors] - if additional_middlewares: - middlewares.extend(additional_middlewares) - # create the web application using the API - app = web.Application( - router=router, - middlewares=middlewares, - ) - router.set_cors(app, domains='*', headers=( - (hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, hdrs.AUTHORIZATION), - )) - - # Include our specifications in a router, - # is now available in the swagger-ui to the address http://localhost:8080/swagger/?spec=v1 - router.include( - 
spec=Path(base_folder / spec_file), - operationId_mapping=opmap, - name='v0', # name to access in swagger-ui, - basePath="/v0" # BUG: in apiset with openapi 3.0.0 [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) - ) - - return app - -def __create_default_operation_mapping(specs_file): - operation_mapping = {} - yaml_specs = ExtendedSchemaFile(specs_file) - paths = yaml_specs['paths'] - for path in paths.items(): - for method in path[1].items(): # can be get, post, patch, put, delete... - op_str = "operationId" - if op_str not in method[1]: - raise Exception("The API %s does not contain the operationId tag for route %s %s" % (specs_file, path[0], method[0])) - operation_id = method[1][op_str] - operation_mapping[operation_id] = getattr(handlers, operation_id) - return OperationIdMapping(**operation_mapping) diff --git a/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py b/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py deleted file mode 100644 index 0563f81fd53..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/typing_utils.py +++ /dev/null @@ -1,32 +0,0 @@ -# coding: utf-8 - -import sys - -if sys.version_info < (3, 7): - import typing - - def is_generic(klass): - """ Determine whether klass is a generic class """ - return type(klass) == typing.GenericMeta - - def is_dict(klass): - """ Determine whether klass is a Dict """ - return klass.__extra__ == dict - - def is_list(klass): - """ Determine whether klass is a List """ - return klass.__extra__ == list - -else: - - def is_generic(klass): - """ Determine whether klass is a generic class """ - return hasattr(klass, '__origin__') - - def is_dict(klass): - """ Determine whether klass is a Dict """ - return klass.__origin__ == dict - - def is_list(klass): - """ Determine whether klass is a List """ - return klass.__origin__ == list diff --git 
a/services/director/src/simcore_service_director/rest/generated_code/util.py b/services/director/src/simcore_service_director/rest/generated_code/util.py deleted file mode 100644 index a9ab1e81939..00000000000 --- a/services/director/src/simcore_service_director/rest/generated_code/util.py +++ /dev/null @@ -1,131 +0,0 @@ -import datetime - -import typing -from typing import Union -from . import typing_utils - -T = typing.TypeVar('T') -Class = typing.Type[T] - - -def _deserialize(data: Union[dict, list, str], klass: Union[Class, str]) -> Union[dict, list, Class, int, float, str, bool, datetime.date, datetime.datetime]: - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. - """ - if data is None: - return None - - if klass in (int, float, str, bool): - return _deserialize_primitive(data, klass) - elif klass == object: - return _deserialize_object(data) - elif klass == datetime.date: - return deserialize_date(data) - elif klass == datetime.datetime: - return deserialize_datetime(data) - elif typing_utils.is_generic(klass): - if typing_utils.is_list(klass): - return _deserialize_list(data, klass.__args__[0]) - if typing_utils.is_dict(klass): - return _deserialize_dict(data, klass.__args__[1]) - else: - return deserialize_model(data, klass) - - -def _deserialize_primitive(data, klass: Class) -> Union[Class, int, float, str, bool]: - """Deserializes to primitive type. - - :param data: data to deserialize. - :param klass: class literal. - - :return: int, float, str, bool. - """ - try: - value = klass(data) - except (UnicodeEncodeError, TypeError): - value = data - return value - - -def _deserialize_object(value: T) -> T: - """Return an original value. - - :return: object. - """ - return value - - -def deserialize_date(string: str) -> datetime.date: - """Deserializes string to date. - - :param string: str. - :return: date. 
- """ - try: - from dateutil.parser import parse - return parse(string).date() - except ImportError: - return string - - -def deserialize_datetime(string: str) -> datetime.datetime: - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :return: datetime. - """ - try: - from dateutil.parser import parse - return parse(string) - except ImportError: - return string - - -def deserialize_model(data: Union[dict, list], klass: T) -> T: - """Deserializes list or dict to model. - - :param data: dict, list. - :param klass: class literal. - :return: model object. - """ - instance = klass() - - if not instance.openapi_types: - return data - - if data is not None and isinstance(data, (list, dict)): - for attr, attr_type in instance.openapi_types.items(): - attr_key = instance.attribute_map[attr] - if attr_key in data: - value = data[attr_key] - setattr(instance, attr, _deserialize(value, attr_type)) - - return instance - - -def _deserialize_list(data: list, boxed_type) -> list: - """Deserializes a list and its elements. - - :param data: list to deserialize. - :param boxed_type: class literal. - - :return: deserialized list. - """ - return [_deserialize(sub_data, boxed_type) for sub_data in data] - - -def _deserialize_dict(data: dict, boxed_type) -> dict: - """Deserializes a dict and its elements. - - :param data: dict to deserialize. - :param boxed_type: class literal. - - :return: deserialized dict. 
- """ - return {k: _deserialize(v, boxed_type) for k, v in data.items()} diff --git a/services/director/src/simcore_service_director/rest/handlers.py b/services/director/src/simcore_service_director/rest/handlers.py deleted file mode 100644 index 151f4e4299a..00000000000 --- a/services/director/src/simcore_service_director/rest/handlers.py +++ /dev/null @@ -1,238 +0,0 @@ -# pylint:disable=too-many-arguments - -import logging -from typing import Optional - -import pkg_resources -import yaml -from aiohttp import web, web_exceptions -from simcore_service_director import exceptions, producer, registry_proxy, resources - -log = logging.getLogger(__name__) - - -async def root_get( - request: web.Request, -) -> web.Response: - log.debug("Client does root_get request %s", request) - distb = pkg_resources.get_distribution("simcore-service-director") - with resources.stream(resources.RESOURCE_OPEN_API) as file_ptr: - api_dict = yaml.safe_load(file_ptr) - - service_health = dict( - name=distb.project_name, - status="SERVICE_RUNNING", - api_version=api_dict["info"]["version"], - version=distb.version, - ) - return web.json_response(data=dict(data=service_health)) - - -async def services_get( - request: web.Request, service_type: Optional[str] = None -) -> web.Response: - log.debug( - "Client does services_get request %s with service_type %s", - request, - service_type, - ) - try: - services = [] - if not service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.ALL - ) - elif "computational" in service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.COMPUTATIONAL - ) - elif "interactive" in service_type: - services = await registry_proxy.list_services( - request.app, registry_proxy.ServiceType.DYNAMIC - ) - # NOTE: the validation is done in the catalog. This entrypoint IS and MUST BE only used by the catalog!! 
- # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] - # services = node_validator.validate_nodes(services) - return web.json_response(data=dict(data=services)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def services_by_key_version_get( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - log.debug( - "Client does services_get request %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - services = [ - await registry_proxy.get_image_details( - request.app, service_key, service_version - ) - ] - return web.json_response(data=dict(data=services)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def get_service_labels( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - # GET /services/{service_key}/{service_version}/labels - - log.debug( - "Retrieving service labels %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - service_labels, _ = await registry_proxy.get_image_labels( - request.app, service_key, service_version - ) - return web.json_response(data=dict(data=service_labels)) - - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async 
def service_extras_by_key_version_get( - request: web.Request, service_key: str, service_version: str -) -> web.Response: - # GET /service_extras/{service_key}/{service_version} - log.debug( - "Client does service_extras_by_key_version_get request %s with service_key %s, service_version %s", - request, - service_key, - service_version, - ) - try: - service_extras = await registry_proxy.get_service_extras( - request.app, service_key, service_version - ) - return web.json_response(data=dict(data=service_extras)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_list_get( - request: web.Request, user_id: str, project_id: str -) -> web.Response: - log.debug( - "Client does running_interactive_services_list_get request %s, user_id %s, project_id %s", - request, - user_id, - project_id, - ) - try: - service = await producer.get_services_details(request.app, user_id, project_id) - return web.json_response(data=dict(data=service), status=200) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_post( - request: web.Request, - user_id: str, - project_id: str, - service_key: str, - service_uuid: str, - service_tag: str, - service_basepath: str, -) -> web.Response: - # NOTE: servicelib is not present here - request_simcore_user_agent = request.headers.get("X-Simcore-User-Agent", "") - log.debug( - "Client does running_interactive_services_post request %s with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", - request, - user_id, - project_id, - service_key, - service_tag, - service_uuid, - service_basepath, - request_simcore_user_agent, - 
) - try: - service = await producer.start_service( - request.app, - user_id, - project_id, - service_key, - service_tag, - service_uuid, - service_basepath, - request_simcore_user_agent, - ) - return web.json_response(data=dict(data=service), status=201) - except exceptions.ServiceStartTimeoutError as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - except exceptions.ServiceNotAvailableError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except exceptions.ServiceUUIDInUseError as err: - raise web_exceptions.HTTPConflict(reason=str(err)) - except exceptions.RegistryConnectionError as err: - raise web_exceptions.HTTPUnauthorized(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_get( - request: web.Request, service_uuid: str -) -> web.Response: - log.debug( - "Client does running_interactive_services_get request %s with service_uuid %s", - request, - service_uuid, - ) - try: - service = await producer.get_service_details(request.app, service_uuid) - return web.json_response(data=dict(data=service), status=200) - except exceptions.ServiceUUIDNotFoundError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except Exception as err: - raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - -async def running_interactive_services_delete( - request: web.Request, service_uuid: str, save_state: Optional[bool] = True -) -> web.Response: - log.debug( - "Client does running_interactive_services_delete request %s with service_uuid %s", - request, - service_uuid, - ) - try: - await producer.stop_service(request.app, service_uuid, save_state) - - except exceptions.ServiceUUIDNotFoundError as err: - raise web_exceptions.HTTPNotFound(reason=str(err)) - except Exception as err: - # server errors are logged (>=500) - log.exception( - "Failed to delete dynamic service %s (save_state=%s)", - service_uuid, - save_state, - ) - 
raise web_exceptions.HTTPInternalServerError(reason=str(err)) - - return web.json_response(status=204) From 6b43e88fcaa69bc4ef0f1f185edad3a60ed4e0f0 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:57:15 +0100 Subject: [PATCH 045/201] drafted requirements --- services/director/requirements/Makefile | 7 - services/director/requirements/_base.in | 77 ++----- services/director/requirements/_base.txt | 152 ------------- services/director/requirements/_test.in | 31 +-- services/director/requirements/_test.txt | 251 ---------------------- services/director/requirements/_tools.in | 6 +- services/director/requirements/_tools.txt | 45 ---- services/director/requirements/ci.txt | 10 +- services/director/requirements/dev.txt | 7 + services/director/requirements/prod.txt | 7 +- 10 files changed, 49 insertions(+), 544 deletions(-) delete mode 100644 services/director/requirements/_base.txt delete mode 100644 services/director/requirements/_test.txt delete mode 100644 services/director/requirements/_tools.txt diff --git a/services/director/requirements/Makefile b/services/director/requirements/Makefile index 7aacec9e5ee..3f25442b790 100644 --- a/services/director/requirements/Makefile +++ b/services/director/requirements/Makefile @@ -4,10 +4,3 @@ include ../../../requirements/base.Makefile # Add here any extra explicit dependency: e.g. _migration.txt: _base.txt - - -_test.txt: _base.txt _test.in - ## NOTE: this recipe override has to be removed - ## to execute target upgrades e.g. due to vulnerability of - ## a library. - @echo INFO: test.txt is frozen. Skipping upgrade. 
diff --git a/services/director/requirements/_base.in b/services/director/requirements/_base.in index 0618d6c7759..2c0510eb962 100644 --- a/services/director/requirements/_base.in +++ b/services/director/requirements/_base.in @@ -1,70 +1,19 @@ # -# Specifies third-party dependencies for 'director' +# Specifies third-party dependencies for 'services/web/server/src' # +--constraint ../../../requirements/constraints.txt -# IMPORTANT: All requirements (including the packages in this repository) as FROZEN to those in itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# - current service is going to be replaced by director-v2 -# -# +# intra-repo required dependencies +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in -# This list was obtained as follows -# -# $ docker pull itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0: Pulling from itisfoundation/director -# Digest: sha256:84ba999ca348bf9d56d9ef0af2e3494ede0cd06d357d289e2a09a4191e7a56d3 -# Status: Image is up to date for itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# docker.io/itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 -# -# $ docker inspect itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0| jq '.[0] | .RepoTags, .ContainerConfig.Labels' -# [ -# "itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0" -# ] -# { -# "io.osparc.api-version": "0.1.0", -# "maintainer": "sanderegg", -# "org.label-schema.build-date": "2020-11-05T14:02:31Z", -# "org.label-schema.schema-version": "1.0", -# "org.label-schema.vcs-ref": "c8669fb", -# "org.label-schema.vcs-url": "https://github.com/ITISFoundation/osparc-simcore.git" -# } -# -# $ 
docker run -it itisfoundation/director:master-2020-11-05--14-45.c8669fb52659b684514fefa4f3b4599f57f276a0 pip freeze -# +# service-library[fastapi] +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_fastapi.in -aiodebug==1.1.2 -aiodocker==0.14.0 -aiohttp==3.3.2 -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df -aiopg==1.0.0 -aiozipkin==0.7.1 -async-generator==1.10 -async-timeout==3.0.1 -asyncio-extras==1.3.2 -attrs==20.2.0 -certifi==2019.3.9 -chardet==3.0.4 -dataclasses==0.7 -idna==2.8 -idna-ssl==1.1.0 -isodate==0.6.0 -jsonschema==2.6.0 -lazy-object-proxy==1.4.3 -multidict==4.5.2 -openapi-core==0.12.0 -openapi-spec-validator==0.2.9 -prometheus-client==0.8.0 -psycopg2-binary==2.8.6 -pydantic==1.7.2 -PyYAML==5.4 # CVE-2020-1747 -requests==2.27.1 # -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library -six==1.12.0 -SQLAlchemy==1.3.20 -strict-rfc3339==0.7 -tenacity==6.0.0 -trafaret==2.1.0 -ujson==4.0.1 -urllib3==1.26.5 # CVE-2021-33503 -Werkzeug==1.0.1 -yarl==1.3.0 +aiodocker +fastapi[all] +httpx +prometheus-client +pydantic diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt deleted file mode 100644 index 2c6e016526f..00000000000 --- a/services/director/requirements/_base.txt +++ /dev/null @@ -1,152 +0,0 @@ -aiodebug==1.1.2 - # via - # -r requirements/_base.in - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.in -aiohttp==3.3.2 - # via - # -r requirements/_base.in - # aiodocker - # aiohttp-apiset - # aiozipkin - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.in -aiopg==1.0.0 - # via - # -r 
requirements/_base.in - # simcore-service-library -aiozipkin==0.7.1 - # via - # -r requirements/_base.in - # simcore-service-library -async-generator==1.10 - # via - # -r requirements/_base.in - # asyncio-extras -async-timeout==3.0.1 - # via - # -r requirements/_base.in - # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.in -attrs==20.2.0 - # via - # -r requirements/_base.in - # aiohttp - # openapi-core - # simcore-service-library -certifi==2019.3.9 - # via - # -r requirements/_base.in - # requests -chardet==3.0.4 - # via - # -r requirements/_base.in - # aiohttp -charset-normalizer==2.0.12 - # via requests -dataclasses==0.7 - # via -r requirements/_base.in -idna==2.8 - # via - # -r requirements/_base.in - # idna-ssl - # requests - # yarl -idna-ssl==1.1.0 - # via - # -r requirements/_base.in - # aiohttp -isodate==0.6.0 - # via - # -r requirements/_base.in - # openapi-core -jsonschema==2.6.0 - # via - # -r requirements/_base.in - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 - # via - # -r requirements/_base.in - # openapi-core - # simcore-service-library -multidict==4.5.2 - # via - # -r requirements/_base.in - # aiohttp - # yarl -openapi-core==0.12.0 - # via - # -r requirements/_base.in - # simcore-service-library -openapi-spec-validator==0.2.9 - # via - # -r requirements/_base.in - # openapi-core -prometheus-client==0.8.0 - # via - # -r requirements/_base.in - # simcore-service-library -psycopg2-binary==2.8.6 - # via - # -r requirements/_base.in - # aiopg - # simcore-service-library -pydantic==1.7.2 - # via - # -r requirements/_base.in - # simcore-service-library -pyyaml==5.4 - # via - # -r requirements/_base.in - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -requests==2.27.1 - # via -r requirements/_base.in -simcore-service-library @ 
git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#egg=simcore-service-library&subdirectory=packages/service-library - # via -r requirements/_base.in -six==1.12.0 - # via - # -r requirements/_base.in - # isodate - # openapi-core - # openapi-spec-validator - # tenacity -sqlalchemy==1.3.20 - # via - # -r requirements/_base.in - # simcore-service-library -strict-rfc3339==0.7 - # via - # -r requirements/_base.in - # openapi-core -tenacity==6.0.0 - # via - # -r requirements/_base.in - # simcore-service-library -trafaret==2.1.0 - # via - # -r requirements/_base.in - # simcore-service-library -ujson==4.0.1 - # via - # -r requirements/_base.in - # simcore-service-library -urllib3==1.26.5 - # via - # -r requirements/_base.in - # requests -werkzeug==1.0.1 - # via - # -r requirements/_base.in - # simcore-service-library -yarl==1.3.0 - # via - # -r requirements/_base.in - # aiodocker - # aiohttp diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index d480d049a73..eafeb199342 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -1,32 +1,23 @@ +# Specifies dependencies required to run 'services/director/tests' +# both for unit and integration tests!! # -# Specifies dependencies required to run 'director' -# - -# frozen specs ---requirement _base.txt +--constraint ../../../requirements/constraints.txt -# NOTE: -# FROZEN (see notes in _base.in) -# DO NOT CHANGE ANYTHING HERE. -# IT WON'T HAVE ANY EFFECT +# Adds base AS CONSTRAINT specs, not requirement. 
+# - Resulting _test.txt is a frozen list of EXTRA packages for testing, besides _base.txt # - -# FROZEN as well (DO NOT CHANGE anything in pytest-simcore, it will have no effect in the director package) -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#egg=pytest-simcore&subdirectory=packages/pytest-simcore - +--constraint _base.txt # testing -aioresponses -coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 docker -openapi-spec-validator~=0.2 # TODO: this library is limiting jsonschema<3 -ptvsd -pylint +faker +jsonref pytest -pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 +pytest-asyncio pytest-cov +pytest-docker pytest-instafail pytest-mock pytest-runner pytest-sugar -python-dotenv +respx diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt deleted file mode 100644 index 97f4c9313fa..00000000000 --- a/services/director/requirements/_test.txt +++ /dev/null @@ -1,251 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_test.txt --strip-extras requirements/_test.in -# -aiodebug==1.1.2 - # via - # -r requirements/_base.txt - # simcore-service-library -aiodocker==0.14.0 - # via -r requirements/_base.txt -aiohttp==3.3.2 - # via - # -r requirements/_base.txt - # aiodocker - # aiohttp-apiset - # aioresponses - # aiozipkin - # pytest-aiohttp - # simcore-service-library -aiohttp-apiset @ git+https://github.com/ITISFoundation/aiohttp_apiset.git@5c8a61ceb6de7ed9e09db5b4609b458a0d3773df - # via -r requirements/_base.txt -aiopg==1.0.0 - # via - # -r requirements/_base.txt - # simcore-service-library -aioresponses==0.7.2 - # via -r requirements/_test.in -aiozipkin==0.7.1 - # via - # -r requirements/_base.txt - # simcore-service-library -astroid==2.4.2 - # via pylint 
-async-generator==1.10 - # via - # -r requirements/_base.txt - # asyncio-extras -async-timeout==3.0.1 - # via - # -r requirements/_base.txt - # aiohttp -asyncio-extras==1.3.2 - # via -r requirements/_base.txt -attrs==20.2.0 - # via - # -r requirements/_base.txt - # aiohttp - # openapi-core - # pytest - # simcore-service-library -certifi==2019.3.9 - # via - # -r requirements/_base.txt - # requests -chardet==3.0.4 - # via - # -r requirements/_base.txt - # aiohttp -charset-normalizer==2.0.12 - # via - # -r requirements/_base.txt - # requests -coverage==4.5.1 - # via - # -r requirements/_test.in - # coveralls - # pytest-cov -dataclasses==0.7 - # via - # -r requirements/_base.txt - # pydantic -docker==4.3.1 - # via -r requirements/_test.in -docopt==0.6.2 - # via coveralls -idna==2.8 - # via - # -r requirements/_base.txt - # idna-ssl - # requests - # yarl -idna-ssl==1.1.0 - # via - # -r requirements/_base.txt - # aiohttp -importlib-metadata==2.0.0 - # via - # pluggy - # pytest -iniconfig==1.1.1 - # via pytest -isodate==0.6.0 - # via - # -r requirements/_base.txt - # openapi-core -isort==5.6.4 - # via pylint -jsonschema==2.6.0 - # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -lazy-object-proxy==1.4.3 - # via - # -r requirements/_base.txt - # astroid - # openapi-core - # simcore-service-library -mccabe==0.6.1 - # via pylint -multidict==4.5.2 - # via - # -r requirements/_base.txt - # aiohttp - # yarl -openapi-core==0.12.0 - # via - # -r requirements/_base.txt - # simcore-service-library -openapi-spec-validator==0.2.9 - # via - # -r requirements/_base.txt - # -r requirements/_test.in - # openapi-core -packaging==20.4 - # via - # pytest - # pytest-sugar -pluggy==0.13.1 - # via pytest -prometheus-client==0.8.0 - # via - # -r requirements/_base.txt - # simcore-service-library -psycopg2-binary==2.8.6 - # via - # -r requirements/_base.txt - # aiopg - # simcore-service-library -ptvsd==4.3.2 - # via -r 
requirements/_test.in -py==1.9.0 - # via pytest -pydantic==1.7.2 - # via - # -r requirements/_base.txt - # simcore-service-library -pylint==2.6.0 - # via -r requirements/_test.in -pyparsing==2.4.7 - # via packaging -pytest==6.1.2 - # via - # -r requirements/_test.in - # pytest-aiohttp - # pytest-cov - # pytest-instafail - # pytest-mock - # pytest-simcore - # pytest-sugar -pytest-aiohttp==0.3.0 - # via -r requirements/_test.in -pytest-cov==2.10.1 - # via -r requirements/_test.in -pytest-instafail==0.4.2 - # via -r requirements/_test.in -pytest-mock==3.3.1 - # via -r requirements/_test.in -pytest-runner==5.2 - # via -r requirements/_test.in -pytest-simcore @ git+https://github.com/ITISFoundation/osparc-simcore.git@79f866219bf650c5eeb4fcdf8f017319087c92c7#subdirectory=packages/pytest-simcore - # via -r requirements/_test.in -pytest-sugar==0.9.4 - # via -r requirements/_test.in -python-dotenv==0.15.0 - # via -r requirements/_test.in -pyyaml==5.4 - # via - # -r requirements/_base.txt - # aiohttp-apiset - # openapi-spec-validator - # simcore-service-library -requests==2.27.1 - # via - # -r requirements/_base.txt - # codecov - # coveralls - # docker -simcore-service-library @ git+https://github.com/ITISFoundation/osparc-simcore.git@c8669fb52659b684514fefa4f3b4599f57f276a0#subdirectory=packages/service-library - # via -r requirements/_base.txt -six==1.12.0 - # via - # -r requirements/_base.txt - # astroid - # docker - # isodate - # openapi-core - # openapi-spec-validator - # packaging - # tenacity - # websocket-client -sqlalchemy==1.3.20 - # via - # -r requirements/_base.txt - # simcore-service-library -strict-rfc3339==0.7 - # via - # -r requirements/_base.txt - # openapi-core -tenacity==6.0.0 - # via - # -r requirements/_base.txt - # simcore-service-library -termcolor==1.1.0 - # via pytest-sugar -toml==0.10.2 - # via - # pylint - # pytest -trafaret==2.1.0 - # via - # -r requirements/_base.txt - # simcore-service-library -typed-ast==1.4.1 - # via astroid -ujson==4.0.1 - # 
via - # -r requirements/_base.txt - # simcore-service-library -urllib3==1.26.5 - # via - # -r requirements/_base.txt - # requests -websocket-client==0.57.0 - # via docker -werkzeug==1.0.1 - # via - # -r requirements/_base.txt - # simcore-service-library -wrapt==1.12.1 - # via astroid -yarl==1.3.0 - # via - # -r requirements/_base.txt - # aiodocker - # aiohttp -zipp==3.4.0 - # via importlib-metadata diff --git a/services/director/requirements/_tools.in b/services/director/requirements/_tools.in index 05f1ab1646f..52a9a39d162 100644 --- a/services/director/requirements/_tools.in +++ b/services/director/requirements/_tools.in @@ -1,7 +1,7 @@ +--constraint ../../../requirements/constraints.txt --constraint _base.txt --constraint _test.txt +--requirement ../../../requirements/devenv.txt + watchdog[watchmedo] -black~=20.8b0 -pip-tools -bump2version diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt deleted file mode 100644 index 821e63f1a10..00000000000 --- a/services/director/requirements/_tools.txt +++ /dev/null @@ -1,45 +0,0 @@ -appdirs==1.4.4 - # via black -black==20.8b1 - # via -r requirements/_tools.in -bump2version==1.0.1 - # via -r requirements/_tools.in -click==8.0.3 - # via - # black - # pip-tools -mypy-extensions==0.4.3 - # via black -pathspec==0.9.0 - # via black -pep517==0.12.0 - # via pip-tools -pip==24.2 - # via pip-tools -pip-tools==6.4.0 - # via -r requirements/_tools.in -pyyaml==5.4 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # watchdog -regex==2022.1.18 - # via black -setuptools==75.2.0 - # via pip-tools -toml==0.10.2 - # via - # -c requirements/_test.txt - # black -tomli==1.2.3 - # via pep517 -typed-ast==1.4.1 - # via - # -c requirements/_test.txt - # black -typing-extensions==4.0.1 - # via black -watchdog==2.1.6 - # via -r requirements/_tools.in -wheel==0.37.1 - # via pip-tools diff --git a/services/director/requirements/ci.txt b/services/director/requirements/ci.txt index 
8edcd5f2bfe..e805fec3802 100644 --- a/services/director/requirements/ci.txt +++ b/services/director/requirements/ci.txt @@ -7,7 +7,15 @@ # # installs base + tests requirements +--requirement _base.txt --requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +pytest-simcore @ ../../packages/pytest-simcore/ +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ # installs current package -. +simcore-service-director @ . diff --git a/services/director/requirements/dev.txt b/services/director/requirements/dev.txt index dac3f0a494b..f278b7206fd 100644 --- a/services/director/requirements/dev.txt +++ b/services/director/requirements/dev.txt @@ -12,5 +12,12 @@ --requirement _test.txt --requirement _tools.txt + +# installs this repo's packages +--editable ../../packages/models-library +--editable ../../packages/pytest-simcore/ +--editable ../../packages/service-library[fastapi] +--editable ../../packages/settings-library/ + # installs current package --editable . diff --git a/services/director/requirements/prod.txt b/services/director/requirements/prod.txt index dc0ec561efe..8a8b1d29125 100644 --- a/services/director/requirements/prod.txt +++ b/services/director/requirements/prod.txt @@ -9,5 +9,10 @@ # installs base requirements --requirement _base.txt +# installs this repo's packages +simcore-models-library @ ../../packages/models-library +simcore-service-library[fastapi] @ ../../packages/service-library +simcore-settings-library @ ../../packages/settings-library/ + # installs current package -. +simcore-service-director @ . 
From 8d124c8cbcbeef5c43297355d329d9ed671baded Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:01:53 +0100 Subject: [PATCH 046/201] upgraded --- services/director/requirements/_base.txt | 379 ++++++++++++++++++++++ services/director/requirements/_test.txt | 110 +++++++ services/director/requirements/_tools.txt | 86 +++++ 3 files changed, 575 insertions(+) create mode 100644 services/director/requirements/_base.txt create mode 100644 services/director/requirements/_test.txt create mode 100644 services/director/requirements/_tools.txt diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt new file mode 100644 index 00000000000..ea607f5efec --- /dev/null +++ b/services/director/requirements/_base.txt @@ -0,0 +1,379 @@ +aio-pika==9.4.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.23.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_base.in +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.10.10 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.1 + # via aiohttp +anyio==4.6.2.post1 + # via + # fast-depends + # faststream + # httpx + # starlette + # watchfiles +arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +asgiref==3.8.1 + # via opentelemetry-instrumentation-asgi +attrs==24.2.0 + # via + # aiohttp + # jsonschema + # referencing +certifi==2024.8.30 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.0 + # via requests +click==8.1.7 + # via + # typer + # uvicorn +deprecated==1.2.14 + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-semantic-conventions +dnspython==2.7.0 + # via email-validator +email-validator==2.2.0 + # via + # fastapi + # pydantic +fast-depends==2.4.12 + # via faststream +fastapi==0.99.1 + # via + # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator +faststream==0.5.28 + # via -r requirements/../../../packages/service-library/requirements/_base.in +frozenlist==1.5.0 + # via + # aiohttp + # aiosignal +googleapis-common-protos==1.65.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.67.1 + # via opentelemetry-exporter-otlp-proto-grpc +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.6 + # via httpx +httptools==0.6.4 + # via uvicorn +httpx==0.27.2 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r 
requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # fastapi +idna==3.10 + # via + # anyio + # email-validator + # httpx + # requests + # yarl +importlib-metadata==8.4.0 + # via opentelemetry-api +itsdangerous==2.2.0 + # via fastapi +jinja2==3.1.4 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2023.7.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +multidict==6.1.0 + # via + # aiohttp + # yarl +opentelemetry-api==1.27.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.27.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in 
+opentelemetry-exporter-otlp-proto-common==1.27.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.27.0 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.27.0 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-asgi==0.48b0 + # via opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-fastapi==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-requests==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.27.0 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.27.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.48b0 + # via + # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-requests +orjson==3.10.11 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # fastapi +pamqp==3.3.0 + # via aiormq +prometheus-client==0.21.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in + # prometheus-fastapi-instrumentator +prometheus-fastapi-instrumentator==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +propcache==0.2.0 + # via yarl +protobuf==4.25.5 + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==6.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pydantic==1.10.18 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/_base.in + # fast-depends + # fastapi +pygments==2.18.0 + # via rich +pyinstrument==5.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +python-dateutil==2.9.0.post0 + # via arrow +python-dotenv==1.0.1 + # via uvicorn +python-multipart==0.0.17 + # via fastapi +pyyaml==6.0.2 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # fastapi + # uvicorn +redis==5.2.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +referencing==0.29.3 + # via + # -c requirements/../../../packages/service-library/requirements/./constraints.txt + # jsonschema + # jsonschema-specifications +repro-zipfile==0.3.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +requests==2.32.3 + # via opentelemetry-exporter-otlp-proto-http +rich==13.9.4 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.20.1 + # via + # jsonschema + # referencing +setuptools==75.3.0 + # via opentelemetry-instrumentation +shellingham==1.5.4 + # via typer +six==1.16.0 + # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # httpx +starlette==0.27.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +tenacity==9.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +toolz==1.0.0 + # via -r 
requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.66.6 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.12.5 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20241003 + # via arrow +typing-extensions==4.12.2 + # via + # aiodebug + # fastapi + # faststream + # opentelemetry-sdk + # pydantic + # typer +ujson==5.10.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # fastapi +urllib3==2.2.3 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # requests 
+uvicorn==0.32.0 + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # fastapi +uvloop==0.21.0 + # via uvicorn +watchfiles==0.24.0 + # via uvicorn +websockets==13.1 + # via uvicorn +wrapt==1.16.0 + # via + # deprecated + # opentelemetry-instrumentation +yarl==1.17.1 + # via + # aio-pika + # aiohttp + # aiormq +zipp==3.20.2 + # via importlib-metadata diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt new file mode 100644 index 00000000000..656c294334f --- /dev/null +++ b/services/director/requirements/_test.txt @@ -0,0 +1,110 @@ +anyio==4.6.2.post1 + # via + # -c requirements/_base.txt + # httpx +attrs==24.2.0 + # via + # -c requirements/_base.txt + # pytest-docker +certifi==2024.8.30 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.0 + # via + # -c requirements/_base.txt + # requests +coverage==7.6.4 + # via pytest-cov +docker==7.1.0 + # via -r requirements/_test.in +faker==30.8.2 + # via -r requirements/_test.in +h11==0.14.0 + # via + # -c requirements/_base.txt + # httpcore +httpcore==1.0.6 + # via + # -c requirements/_base.txt + # httpx +httpx==0.27.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # respx +idna==3.10 + # via + # -c requirements/_base.txt + # anyio + # httpx + # requests +iniconfig==2.0.0 + # via pytest +jsonref==1.1.0 + # via -r requirements/_test.in +packaging==24.1 + # via + # pytest + # pytest-sugar +pluggy==1.5.0 + # via pytest +pytest==8.3.3 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-cov + # pytest-docker + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-asyncio==0.23.8 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +pytest-cov==6.0.0 + # via -r requirements/_test.in +pytest-docker==3.1.1 + # via -r requirements/_test.in 
+pytest-instafail==0.5.0 + # via -r requirements/_test.in +pytest-mock==3.14.0 + # via -r requirements/_test.in +pytest-runner==6.0.1 + # via -r requirements/_test.in +pytest-sugar==1.0.0 + # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via + # -c requirements/_base.txt + # faker +requests==2.32.3 + # via + # -c requirements/_base.txt + # docker +respx==0.21.1 + # via -r requirements/_test.in +six==1.16.0 + # via + # -c requirements/_base.txt + # python-dateutil +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio + # httpx +termcolor==2.5.0 + # via pytest-sugar +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # faker +urllib3==2.2.3 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker + # requests diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt new file mode 100644 index 00000000000..3c83af3ad3e --- /dev/null +++ b/services/director/requirements/_tools.txt @@ -0,0 +1,86 @@ +astroid==3.3.5 + # via pylint +black==24.10.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # -c requirements/_base.txt + # black + # pip-tools +dill==0.3.9 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.16.1 + # via virtualenv +identify==2.6.1 + # via pre-commit +isort==5.13.2 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.13.0 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.0.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==24.1 + # via + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via black +pip==24.3.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt 
+platformdirs==4.3.6 + # via + # black + # pylint + # virtualenv +pre-commit==4.0.1 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.1 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # pre-commit + # watchdog +ruff==0.7.2 + # via -r requirements/../../../requirements/devenv.txt +setuptools==75.3.0 + # via + # -c requirements/_base.txt + # pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.12.2 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.27.1 + # via pre-commit +watchdog==6.0.0 + # via -r requirements/_tools.in +wheel==0.44.0 + # via pip-tools From 94162ca30b1eabff0a101d4689a9d2aa67cd4b26 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:04:48 +0100 Subject: [PATCH 047/201] rm --- services/director/codegen.sh | 157 --------------------- services/director/temp_generate_openapi.sh | 37 ----- 2 files changed, 194 deletions(-) delete mode 100755 services/director/codegen.sh delete mode 100755 services/director/temp_generate_openapi.sh diff --git a/services/director/codegen.sh b/services/director/codegen.sh deleted file mode 100755 index bd5b6600cf6..00000000000 --- a/services/director/codegen.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash -# define the input specification file and the output directory -# typical structure: -# /src/package-name/.openapi/v1/package_api.yaml -- this is the input file -# /src/package-name/rest/generated_code -- this is the output directory -SOURCE_DIR=./src/simcore_service_director -API_VERSION=v0 -INPUT_SPEC=${SOURCE_DIR}/api/${API_VERSION}/openapi.yaml -OUTPUT_DIR=${SOURCE_DIR}/rest -OUTPUT_DIR_GEN=${SOURCE_DIR}/rest/generated_code -INIT_FILE_PATH=${OUTPUT_DIR}/__init__.py 
-HANDLERS_FILE_PATH=${OUTPUT_DIR}/handlers.py -ROUTING_FILE_PATH=${OUTPUT_DIR_GEN}/routing.py - -# create the folder for the output -mkdir -p $OUTPUT_DIR -# generate the python server models code -ABSOLUTE_INPUT_PATH=$(realpath "${INPUT_SPEC}") -ABSOLUTE_OUTPUT_DIR=$(realpath "${OUTPUT_DIR}") -ABSOLUTE_OUTPUT_DIR_GEN=$(realpath "${OUTPUT_DIR_GEN}") -../../scripts/openapi/openapi_python_server_codegen.sh -i ${ABSOLUTE_INPUT_PATH} -o ${ABSOLUTE_OUTPUT_DIR_GEN} -# replace import entries in python code -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server.models././g' {} \; -find ${OUTPUT_DIR_GEN}/models -type f -exec sed -i 's/openapi_server/../g' {} \; -find ${OUTPUT_DIR_GEN} -maxdepth 1 -type f -exec sed -i 's/openapi_server/./g' {} \; -# create __init__.py if always -cat > "${INIT_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. -""" -from .generated_code import ( - models, - util, - routing -) -EOF - -# only generate stub if necessary -if [ ! -e "${HANDLERS_FILE_PATH}" ]; then - cat > "${HANDLERS_FILE_PATH}" << EOF -"""This is a generated stub of handlers to be connected to the paths defined in the API - -""" -import logging - -from aiohttp import web_exceptions - -log = logging.getLogger(__name__) - -# This module shall contain the handlers of the API (implementation side of the openapi server side). -# Each operation is typically defined as -# async def root_get(request): -# return "hello API world" - -# The API shall define a path where the entry operationId: -# operationId: root_get -EOF -fi - -# always generate routing -cat > "${ROUTING_FILE_PATH}" << EOF -"""GENERATED CODE from codegen.sh -It is advisable to not modify this code if possible. -This will be overriden next time the code generator is called. - -use create_web_app to initialise the web application using the specification file. 
-The base folder is the root of the package. -""" - - -import logging -from pathlib import Path - -from aiohttp import hdrs, web -from aiohttp_apiset import SwaggerRouter -from aiohttp_apiset.exceptions import ValidationError -from aiohttp_apiset.middlewares import Jsonify, jsonify -from aiohttp_apiset.swagger.loader import ExtendedSchemaFile -from aiohttp_apiset.swagger.operations import OperationIdMapping - -from .. import handlers -from .models.base_model_ import Model - -log = logging.getLogger(__name__) - -@web.middleware -async def __handle_errors(request, handler): - try: - log.debug("error middleware handling request %s to handler %s", request, handler) - response = await handler(request) - return response - except ValidationError as ex: - # aiohttp apiset errors - log.exception("error happened in handling route") - error = dict(status=ex.status, message=ex.to_tree()) - error_enveloped = dict(error=error) - return web.json_response(error_enveloped, status=ex.status) - except web.HTTPError as ex: - log.exception("error happened in handling route") - error = dict(status=ex.status, message=str(ex.reason)) - error_enveloped = dict(data=error) - return web.json_response(error_enveloped, status=ex.status) - - -def create_web_app(base_folder, spec_file, additional_middlewares = None): - # create the default mapping of the operationId to the implementation code in handlers - opmap = __create_default_operation_mapping(Path(base_folder / spec_file)) - - # generate a version 3 of the API documentation - router = SwaggerRouter( - swagger_ui='/apidoc/', - version_ui=3, # forces the use of version 3 by default - search_dirs=[base_folder], - default_validate=True, - ) - - # add automatic jsonification of the models located in generated code - jsonify.singleton = Jsonify(indent=3, ensure_ascii=False) - jsonify.singleton.add_converter(Model, lambda o: o.to_dict(), score=0) - - middlewares = [jsonify, __handle_errors] - if additional_middlewares: - 
middlewares.extend(additional_middlewares) - # create the web application using the API - app = web.Application( - router=router, - middlewares=middlewares, - ) - router.set_cors(app, domains='*', headers=( - (hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, hdrs.AUTHORIZATION), - )) - - # Include our specifications in a router, - # is now available in the swagger-ui to the address http://localhost:8080/swagger/?spec=v1 - router.include( - spec=Path(base_folder / spec_file), - operationId_mapping=opmap, - name='v0', # name to access in swagger-ui, - basePath="/v0" # BUG: in apiset with openapi 3.0.0 [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) - ) - - return app - -def __create_default_operation_mapping(specs_file): - operation_mapping = {} - yaml_specs = ExtendedSchemaFile(specs_file) - paths = yaml_specs['paths'] - for path in paths.items(): - for method in path[1].items(): # can be get, post, patch, put, delete... - op_str = "operationId" - if op_str not in method[1]: - raise Exception("The API %s does not contain the operationId tag for route %s %s" % (specs_file, path[0], method[0])) - operation_id = method[1][op_str] - operation_mapping[operation_id] = getattr(handlers, operation_id) - return OperationIdMapping(**operation_mapping) -EOF diff --git a/services/director/temp_generate_openapi.sh b/services/director/temp_generate_openapi.sh deleted file mode 100755 index 533053087ef..00000000000 --- a/services/director/temp_generate_openapi.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -set -e - -cd $(dirname $0) -usage() -{ - echo "usage: temp_generate_openapi.sh [[[-i input]] | [-h help]]" -} - -apihub_specs_dir= -# process arguments -while [ "$1" != "" ]; do - case $1 in - -i | --input ) shift - apihub_specs_dir=$1 - ;; - -h | --help ) usage - exit - ;; - * ) usage - exit 1 - esac - shift -done - -if [ -z "$apihub_specs_dir" ]; then - echo "please define an apihub specs directory..." 
- usage - exit 1 -fi - -docker run \ - -v $apihub_specs_dir:/input \ - -v ${PWD}/src/simcore_service_director/api/v0:/output \ - itisfoundation/oas_resolver \ - /input/director/v0/openapi.yaml \ - /output/openapi.yaml From ca381d167cb203c1fa4fe40c9f132ea58abee812 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:13:10 +0100 Subject: [PATCH 048/201] setup --- services/director/Makefile | 10 ----- services/director/setup.cfg | 6 --- services/director/setup.py | 88 +++++++++++++++++++------------------ 3 files changed, 45 insertions(+), 59 deletions(-) diff --git a/services/director/Makefile b/services/director/Makefile index 0e91426d6d2..140d05c72d0 100644 --- a/services/director/Makefile +++ b/services/director/Makefile @@ -3,13 +3,3 @@ # include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile - - -_check_python_version: - # Checking that runs with correct python version - @python3 -c "import sys; current_version=[int(d) for d in '3.6'.split('.')]; assert sys.version_info[:2]==tuple(current_version[:2]), f'Expected python $(EXPECTED_PYTHON_VERSION), got {sys.version_info}'" - - -.PHONY: openapi-specs -openapi-specs: ## updates and validates openapi specifications - $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 8e7e8ea592f..46680238b7d 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -5,10 +5,4 @@ message = director api version: {current_version} → {new_version} tag = False commit_args = --no-verify -[bumpversion:file:setup.py] -search = "{current_version}" -replace = "{new_version}" - [bumpversion:file:VERSION] -[bumpversion:file:../../api/specs/director/openapi.yaml] -[bumpversion:file:./src/simcore_service_director/api/v0/openapi.yaml] diff --git a/services/director/setup.py b/services/director/setup.py index 8c12d36f5cb..4d8fdfcc7e6 100644 --- 
a/services/director/setup.py +++ b/services/director/setup.py @@ -4,63 +4,65 @@ from setuptools import find_packages, setup -here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -if not (sys.version_info.major == 3 and sys.version_info.minor == 6): - raise RuntimeError( - "Requires <=3.6, got %s. Did you forget to activate virtualenv?" - % sys.version_info - ) +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } -def read_reqs(reqs_path: Path): - reqs = re.findall( - r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", reqs_path.read_text(), re.MULTILINE - ) - # TODO: temporary excluding requirements using git - # https://pip.pypa.io/en/stable/reference/pip_install/#vcs-support - return [r for r in reqs if not r.startswith("git")] +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -install_requirements = read_reqs(here / "requirements" / "_base.txt") + [ - "aiohttp-apiset", - "simcore-service-library", -] +NAME = "simcore-service-director" +VERSION = (CURRENT_DIR / "VERSION").read_text().strip() +AUTHORS = ("Sylvain Anderegg (sanderegg)",) +DESCRIPTION = "oSparc Director webserver service" +README = (CURRENT_DIR / "README.md").read_text() -test_requirements = read_reqs(here / "requirements" / "_test.txt") +PROD_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.txt") + | { + "simcore-models-library", + "simcore-service-library[fastapi]", + "simcore-settings-library", + } +) + +TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -_CONFIG = dict( - name="simcore-service-director", - version="0.1.0", - description="oSparc Director webserver service", - author="Sylvain Anderegg (sanderegg)", - python_requires="~=3.6", - packages=find_packages(where="src"), - package_dir={ + +SETUP = { + "name": NAME, + "version": VERSION, + 
"author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - install_requires=install_requirements, - tests_require=test_requirements, - setup_requires=["pytest-runner"], - package_data={ - "": ["api/v0/openapi.yaml", "api/v0/schemas/*.json"], - }, - entry_points={ + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-director = simcore_service_director.__main__:main", "simcore-service = simcore_service_director.__main__:main", ], }, -) - - -def main(): - """Execute the setup commands.""" - setup(**_CONFIG) - return 0 # syccessful termination - +} if __name__ == "__main__": - raise SystemExit(main()) + setup(**SETUP) From 44db6cfb374140b89996f38baf104c9474f90c89 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:15:08 +0100 Subject: [PATCH 049/201] =?UTF-8?q?director=20api=20version:=200.1.0=20?= =?UTF-8?q?=E2=86=92=201.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- services/director/VERSION | 2 +- services/director/setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/VERSION b/services/director/VERSION index 6e8bf73aa55..3eefcb9dd5b 100644 --- a/services/director/VERSION +++ b/services/director/VERSION @@ -1 +1 @@ -0.1.0 +1.0.0 diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 46680238b7d..1eb089c0af8 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.1.0 +current_version = 1.0.0 commit = True message = director api version: 
{current_version} → {new_version} tag = False From 837bac1b11965b9ef4ea82129833155854b1c65d Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:18:35 +0100 Subject: [PATCH 050/201] unreadme --- services/director/README.md | 85 ------------------------------------- 1 file changed, 85 deletions(-) diff --git a/services/director/README.md b/services/director/README.md index 21146025f16..d919b4f4e98 100644 --- a/services/director/README.md +++ b/services/director/README.md @@ -1,86 +1 @@ # director - -[![Docker Pulls](https://img.shields.io/docker/pulls/itisfoundation/director.svg)](https://hub.docker.com/r/itisfoundation/director/tags) -[![](https://images.microbadger.com/badges/image/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/version/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") -[![](https://images.microbadger.com/badges/commit/itisfoundation/director.svg)](https://microbadger.com/images/itisfoundation/director "More on service image in registry") - - -## Usage - -```bash - # go to director folder - cd /services/director - # install - pip install . - # start director - simcore-service-director - # or - python -m simcore_service_director -``` - -## Development - -```bash - # go to director folder - cd /services/director - # install with symlinks - pip install -r requirements-dev.txt -``` - -The director implements a REST API defined in __/src/simcore_service_director/api/v1/openapi.yaml__. -First extend the API and validate the API before implementing any new route. - -## Current status - -End validation of the requests/responses is missing as some issues arose with using the openapi-core library. It seems it is not happy with referencing a json schema file. 
An issue was filed to see if something may be done quickly [github](https://github.com/p1c2u/openapi-core/issues/90). - -## docker - -- Uses multi-stage dockerfile to extend a common stack of layers into production or development images -- Main difference between development and production stages is whether the code gets copied or not inside of the image -- Development stage is set first to avoid re-building when files are changed -- ``boot.sh`` is necessary to activate the virtual environment inside of the docker - -```bash - - # development image - docker build --target development -t director:dev . - docker run -v %DIRECTOR_SRC_CODE:/home/scu/src director:dev - - # production image - docker build -t director:prod . - # or - docker build --target production -t director:prod . - docker run director:prod - -``` - -### local testing - -Using the main Makefile of the oSparc platform allows for testing the director: - -```bash - # go to root folder - make build-devel - # switch the docker swarm on in development mode - make up-devel -``` - -Then open [director-swagger-ui](http://localhost:8080/apidoc/) to see the director API and try out the different routes. - -## code generation from REST API "server side" - -Execute the following script for generating the necessary code server side - -```bash -./codegen.sh -``` - -NOTE: Issue #3 must still be taken care of manually! - -### Issues - -1. SwaggerRouter must be created with __version_ui__ set to 3 or the swagger ui must be access with ?version=3 -2. SwaggerRouter.include needs to have the argument __basePath__ filled to serve the API at the right location (ndlr /v1) [Github bug entry](https://github.com/aamalev/aiohttp_apiset/issues/45) -3. 
The generated models need to be manually corrected when the properties are __nullable__ as the code generator does add a check for __None__ value that triggers a ValueError exception even though the value is allowed to be null [Python server models generation issue with __nullable: true__ on GitHub](https://github.com/OpenAPITools/openapi-generator/issues/579) From 88fc379331f7c0175c63b7793fef2ab0b07bc54a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:57:16 +0100 Subject: [PATCH 051/201] entrpoint/boot --- services/director/docker/boot.sh | 58 ++++++++---- services/director/docker/entrypoint.sh | 124 ++++++++++++------------- 2 files changed, 100 insertions(+), 82 deletions(-) diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index 2a77aa40daa..0d7122a6681 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -6,32 +6,56 @@ IFS=$(printf '\n\t') INFO="INFO: [$(basename "$0")] " -# BOOTING application --------------------------------------------- echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." 
-echo " User :$(id "$(whoami)")" -echo " Workdir :$(pwd)" +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +# +# DEVELOPMENT MODE +# +# - prints environ info +# - installs requirements in mounted volume +# if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "Environment :" printenv | sed 's/=/: /' | sed 's/^/ /' | sort echo "$INFO" "Python :" python --version | sed 's/^/ /' command -v python | sed 's/^/ /' - cd services/director || exit 1 - # speedup for legacy service with all essential dependencies pinned - # in this case `--no-deps` does the trick, for details see link - # https://stackoverflow.com/a/65793484/2855718 - pip install --no-cache-dir --no-deps -r requirements/dev.txt - cd - || exit 1 - echo "$INFO" "PIP :" - pip list | sed 's/^/ /' + + cd services/autoscaling + uv pip --quiet --no-cache-dir sync requirements/dev.txt + cd - + uv pip list +fi + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + # NOTE: production does NOT pre-installs debugpy + uv pip install --no-cache-dir debugpy fi -# RUNNING application ---------------------------------------- -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then - watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories -- \ - python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \ - simcore_service_director --loglevel="${LOGLEVEL}" +# +# RUNNING application +# + +APP_LOG_LEVEL=${DIRECTOR_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} +SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') +echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" + +if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + + exec sh -c " + cd services/autoscaling/src/simcore_service_director && \ + python -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + --host 0.0.0.0 \ + --reload \ + $reload_dir_packages + --reload-dir . \ + --log-level \"${SERVER_LOG_LEVEL}\" + " else - exec simcore-service-director --loglevel="${LOGLEVEL}" + exec uvicorn simcore_service_director.main:the_app \ + --host 0.0.0.0 \ + --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh index 232da22ba7e..ad982fd8d5c 100755 --- a/services/director/docker/entrypoint.sh +++ b/services/director/docker/entrypoint.sh @@ -1,4 +1,9 @@ #!/bin/sh +# +# - Executes *inside* of the container upon start as --user [default root] +# - Notice that the container *starts* as --user [default root] but +# *runs* as non-root user [scu] +# set -o errexit set -o nounset @@ -10,86 +15,75 @@ ERROR="ERROR: [$(basename "$0")] " # Read self-signed SSH certificates (if applicable) # -# In case the director must access a docker registry in a secure way using +# In case clusters-keeper must access a docker registry in a secure way using # non-standard certificates (e.g. such as self-signed certificates), this call is needed. -# It needs to be executed as root. +# It needs to be executed as root. Also required to any access for example to secure rabbitmq. update-ca-certificates -# This entrypoint script: -# -# - Executes *inside* of the container upon start as --user [default root] -# - Notice that the container *starts* as --user [default root] but -# *runs* as non-root user [scu] -# echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." -echo "$INFO" "User :$(id "$(whoami)")" -echo "$INFO" "Workdir :$(pwd)" -echo scuUser :"$(id scu)" - -if [ "${SC_BUILD_TARGET}" = "development" ] -then - # NOTE: expects docker run ... 
-v $(pwd):/devel/services/director - DEVEL_MOUNT=/devel/services/director +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +echo "$INFO" "User : $(id scu)" +echo "$INFO" "python : $(command -v python)" +echo "$INFO" "pip : $(command -v pip)" - stat $DEVEL_MOUNT > /dev/null 2>&1 || \ - (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script +# +# DEVELOPMENT MODE +# - expects docker run ... -v $(pwd):$SC_DEVEL_MOUNT +# - mounts source folders +# - deduces host's uid/gip and assigns to user within docker +# +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "development mode detected..." + stat "${SC_DEVEL_MOUNT}" >/dev/null 2>&1 || + (echo "$ERROR" "You must mount '$SC_DEVEL_MOUNT' to deduce user and group ids" && exit 1) - echo "setting correct user id/group id..." - HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}") - HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}") - CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) - if [ "$HOST_USERID" -eq 0 ] - then - echo "Warning: Folder mounted owned by root user... adding $SC_USER_NAME to root..." - adduser "$SC_USER_NAME" root + echo "$INFO" "setting correct user id/group id..." + HOST_USERID=$(stat --format=%u "${SC_DEVEL_MOUNT}") + HOST_GROUPID=$(stat --format=%g "${SC_DEVEL_MOUNT}") + CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) + if [ "$HOST_USERID" -eq 0 ]; then + echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..." + adduser "$SC_USER_NAME" root + else + echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." 
+ # take host's credentials in $SC_USER_NAME + if [ -z "$CONT_GROUPNAME" ]; then + echo "$WARNING" "Creating new group grp$SC_USER_NAME" + CONT_GROUPNAME=grp$SC_USER_NAME + addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" else - echo "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." - # take host's credentials in $SC_USER_NAME - if [ -z "$CONT_GROUPNAME" ] - then - echo "Creating new group my$SC_USER_NAME" - CONT_GROUPNAME=my$SC_USER_NAME - addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" - else - echo "group already exists" - fi - echo "adding $SC_USER_NAME to group $CONT_GROUPNAME..." - adduser "$SC_USER_NAME" "$CONT_GROUPNAME" - - echo "changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" - usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" - - echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around - echo "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + echo "$INFO" "group already exists" fi -fi + echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..." 
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + echo "$WARNING" "Changing ownership [this could take some time]" + echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ] -then - # NOTE: production does NOT pre-installs ptvsd - python3 -m pip install ptvsd + echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fi fi # Appends docker group if socket is mounted DOCKER_MOUNT=/var/run/docker.sock -if stat $DOCKER_MOUNT > /dev/null 2>&1 -then - echo "$INFO detected docker socket is mounted, adding user to group..." - GROUPID=$(stat --format=%g $DOCKER_MOUNT) - GROUPNAME=scdocker +if stat $DOCKER_MOUNT >/dev/null 2>&1; then + echo "$INFO detected docker socket is mounted, adding user to group..." + GROUPID=$(stat --format=%g $DOCKER_MOUNT) + GROUPNAME=scdocker - if ! addgroup --gid "$GROUPID" $GROUPNAME > /dev/null 2>&1 - then - echo "$WARNING docker group with $GROUPID already exists, getting group name..." - # if group already exists in container, then reuse name - GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) - echo "$WARNING docker group with $GROUPID has name $GROUPNAME" - fi - adduser "$SC_USER_NAME" "$GROUPNAME" + if ! addgroup --gid "$GROUPID" $GROUPNAME >/dev/null 2>&1; then + echo "$WARNING docker group with $GROUPID already exists, getting group name..." 
+ # if group already exists in container, then reuse name + GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1) + echo "$WARNING docker group with $GROUPID has name $GROUPNAME" + fi + adduser "$SC_USER_NAME" "$GROUPNAME" fi echo "$INFO Starting $* ..." From edb40bc9dc5bdafcec18cdc421a7e7c366bcc95e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 13:37:51 +0100 Subject: [PATCH 052/201] healthcheck --- services/director/docker/healthcheck.py | 33 +++++++++++++------------ 1 file changed, 17 insertions(+), 16 deletions(-) mode change 100644 => 100755 services/director/docker/healthcheck.py diff --git a/services/director/docker/healthcheck.py b/services/director/docker/healthcheck.py old mode 100644 new mode 100755 index b3a1e7e8cad..10e58d00e21 --- a/services/director/docker/healthcheck.py +++ b/services/director/docker/healthcheck.py @@ -8,7 +8,7 @@ --timeout=30s \ --start-period=1s \ --retries=3 \ - CMD python3 docker/healthcheck.py http://localhost:8080/v0/ + CMD python3 docker/healthcheck.py http://localhost:8000/ ``` Q&A: @@ -18,23 +18,24 @@ import os import sys +from contextlib import suppress from urllib.request import urlopen -SUCCESS, UNHEALTHY = 0, 1 +# Disabled if boots with debugger (e.g. 
debug, pdb-debug, debug-ptvsd, etc) +SC_BOOT_MODE = os.environ.get("SC_BOOT_MODE", "") -# Disabled if boots with debugger -ok = os.environ.get("SC_BOOT_MODE") == "debug" +# Adds a base-path if defined in environ +SIMCORE_NODE_BASEPATH = os.environ.get("SIMCORE_NODE_BASEPATH", "") -# Queries host -# pylint: disable=consider-using-with -ok = ( - ok - or urlopen( - "{host}{baseurl}".format( - host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") - ) # adds a base-path if defined in environ - ).getcode() - == 200 -) -sys.exit(SUCCESS if ok else UNHEALTHY) +def is_service_healthy() -> bool: + if "debug" in SC_BOOT_MODE.lower(): + return True + + with suppress(Exception): + with urlopen(f"{sys.argv[1]}{SIMCORE_NODE_BASEPATH}") as f: + return f.getcode() == 200 + return False + + +sys.exit(os.EX_OK if is_service_healthy() else os.EX_UNAVAILABLE) From 3e7241c05a9c4499874eb692e50f90a9745ac5ba Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:08:59 +0100 Subject: [PATCH 053/201] base routes --- .../simcore_service_director/api/__init__.py | 0 .../api/rest/__init__.py | 0 .../api/rest/_health.py | 11 +++++ .../api/rest/_running_interactive_services.py | 41 +++++++++++++++++++ .../api/rest/_service_extras.py | 14 +++++++ .../api/rest/_services.py | 30 ++++++++++++++ .../api/rest/routes.py | 26 ++++++++++++ 7 files changed, 122 insertions(+) create mode 100644 services/director/src/simcore_service_director/api/__init__.py create mode 100644 services/director/src/simcore_service_director/api/rest/__init__.py create mode 100644 services/director/src/simcore_service_director/api/rest/_health.py create mode 100644 services/director/src/simcore_service_director/api/rest/_running_interactive_services.py create mode 100644 services/director/src/simcore_service_director/api/rest/_service_extras.py create mode 100644 services/director/src/simcore_service_director/api/rest/_services.py create mode 100644 
services/director/src/simcore_service_director/api/rest/routes.py diff --git a/services/director/src/simcore_service_director/api/__init__.py b/services/director/src/simcore_service_director/api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/api/rest/__init__.py b/services/director/src/simcore_service_director/api/rest/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/api/rest/_health.py b/services/director/src/simcore_service_director/api/rest/_health.py new file mode 100644 index 00000000000..78659b036ae --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_health.py @@ -0,0 +1,11 @@ +import arrow +from fastapi import APIRouter +from fastapi.responses import PlainTextResponse + +router = APIRouter() + + +@router.get("/", include_in_schema=True, response_class=PlainTextResponse) +async def health_check() -> str: + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py new file mode 100644 index 00000000000..1bd53330d30 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -0,0 +1,41 @@ +from pathlib import Path +from uuid import UUID + +import arrow +from fastapi import APIRouter +from models_library.projects import ProjectID +from models_library.services_types import ServiceKey, ServiceVersion +from models_library.users import UserID + +router = APIRouter() + + +@router.get("/running_interactive_services") +async def list_running_services(user_id: UserID, project_id: ProjectID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
+ return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.post("/running_interactive_services") +async def start_service( + user_id: UserID, + project_id: ProjectID, + service_key: ServiceKey, + service_uuid: UUID, + service_basepath: Path, + service_tag: ServiceVersion | None = None, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/running_interactive_services/{service_uuid}") +async def get_running_service(service_uuid: UUID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.delete("/running_interactive_services/{service_uuid}") +async def stop_service(service_uuid: UUID): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py new file mode 100644 index 00000000000..1c30f411e22 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -0,0 +1,14 @@ +import arrow +from fastapi import APIRouter +from models_library.services_types import ServiceKey, ServiceVersion + +router = APIRouter() + + +@router.get("/service_extras/{service_key}/{service_version}") +async def list_service_extras( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
+ return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py new file mode 100644 index 00000000000..2bf1b066bf3 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -0,0 +1,30 @@ +import arrow +from fastapi import APIRouter +from models_library.services_enums import ServiceType +from models_library.services_types import ServiceKey, ServiceVersion + +router = APIRouter() + + +@router.get("/services") +async def list_services(service_type: ServiceType | None = None): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/services/{service_key}/{service_version}") +async def get_service( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + + +@router.get("/services/{service_key}/{service_version}/labels") +async def list_service_labels( + service_key: ServiceKey, + service_version: ServiceVersion, +): + # NOTE: sync url in docker/healthcheck.py with this entrypoint! + return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py new file mode 100644 index 00000000000..82ccec19302 --- /dev/null +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -0,0 +1,26 @@ +from fastapi import APIRouter, FastAPI, HTTPException +from servicelib.fastapi.exceptions_utils import ( + handle_errors_as_500, + http_exception_as_json_response, +) + +from .._meta import API_VTAG +from . 
import _health, _running_interactive_services, _service_extras, _services + + +def setup_api_routes(app: FastAPI): + """ + Composes resources/sub-resources routers + """ + + app.include_router(_health.router, tags=["operations"]) + + # include the rest under /vX + api_router = APIRouter(prefix=f"/{API_VTAG}") + api_router.include_router(_services.router, tags=["services"]) + api_router.include_router(_service_extras.router, tags=["services"]) + api_router.include_router(_running_interactive_services.router, tags=["services"]) + app.include_router(api_router) + + app.add_exception_handler(Exception, handle_errors_as_500) + app.add_exception_handler(HTTPException, http_exception_as_json_response) From f3ffacb9d496390df27da39e3f9bad71f44fb5ac Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:09:15 +0100 Subject: [PATCH 054/201] added meta --- .../src/simcore_service_director/_meta.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 services/director/src/simcore_service_director/_meta.py diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py new file mode 100644 index 00000000000..bbcd44e8945 --- /dev/null +++ b/services/director/src/simcore_service_director/_meta.py @@ -0,0 +1,43 @@ +""" Application's metadata + +""" + +from typing import Final + +from models_library.basic_types import VersionStr +from packaging.version import Version +from servicelib.utils_meta import PackageInfo + +info: Final = PackageInfo(package_name="simcore-service-director") +__version__: Final[VersionStr] = info.__version__ + + +PROJECT_NAME: Final[str] = info.project_name +VERSION: Final[Version] = info.version +API_VERSION: Final[VersionStr] = info.__version__ +APP_NAME: Final[str] = PROJECT_NAME +API_VTAG: Final[str] = info.api_prefix_path_tag +SUMMARY: Final[str] = info.get_summary() + + +# NOTE: 
https://patorjk.com/software/taag/#p=display&f=Electronic&t=Director-v0 +APP_STARTED_BANNER_MSG = r""" + + ▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄ ▄ ▄▄▄▄▄▄▄▄▄ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░▌▐░░░░░░░░░▌ +▐░█▀▀▀▀▀▀▀█░▌▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▀▀▀▀█░█▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀█░▌ ▐░▌ ▐░▌▐░█░█▀▀▀▀▀█░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▄▄▄▄▄▄▄█░▌ ▄▄▄▄▄▄▄▄▄▄▄▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░█▀▀▀▀█░█▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░█▀▀▀▀█░█▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ +▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐░▌▐░▌ +▐░█▄▄▄▄▄▄▄█░▌▄▄▄▄█░█▄▄▄▄ ▐░▌ ▐░▌ ▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌▐░▌ ▐░▌ ▐░▐░▌ ▐░█▄▄▄▄▄█░█░▌ +▐░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐░▌ ▐░░░░░░░░░░░▌▐░▌ ▐░▌ ▐░▌ ▐░░░░░░░░░▌ + ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀ + {} +""".format( + f"v{__version__}" +) + + +APP_FINISHED_BANNER_MSG = info.get_finished_banner() From 55591eb4be72a956ff23301d3d2fe2a1676b949c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:04 +0100 Subject: [PATCH 055/201] added core --- .../simcore_service_director/core/__init__.py | 0 .../core/application.py | 64 ++++++++++++++++++ .../simcore_service_director/core/errors.py | 14 ++++ .../simcore_service_director/core/settings.py | 66 +++++++++++++++++++ 4 files changed, 144 insertions(+) create mode 100644 services/director/src/simcore_service_director/core/__init__.py create mode 100644 services/director/src/simcore_service_director/core/application.py create mode 100644 
services/director/src/simcore_service_director/core/errors.py create mode 100644 services/director/src/simcore_service_director/core/settings.py diff --git a/services/director/src/simcore_service_director/core/__init__.py b/services/director/src/simcore_service_director/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py new file mode 100644 index 00000000000..f75f0c35336 --- /dev/null +++ b/services/director/src/simcore_service_director/core/application.py @@ -0,0 +1,64 @@ +import logging +from typing import Final + +from fastapi import FastAPI +from servicelib.fastapi.tracing import setup_tracing + +from .._meta import ( + API_VERSION, + API_VTAG, + APP_FINISHED_BANNER_MSG, + APP_NAME, + APP_STARTED_BANNER_MSG, +) +from ..api.rest.routes import setup_api_routes +from .settings import ApplicationSettings + +_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR +_NOISY_LOGGERS: Final[tuple[str]] = ("werkzeug",) + +logger = logging.getLogger(__name__) + + +def create_app(settings: ApplicationSettings) -> FastAPI: + # keep mostly quiet noisy loggers + quiet_level: int = max( + min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + ) + for name in _NOISY_LOGGERS: + logging.getLogger(name).setLevel(quiet_level) + + logger.info("app settings: %s", settings.json(indent=1)) + + app = FastAPI( + debug=settings.DIRECTOR_DEBUG, + title=APP_NAME, + description="Director-v0 service", + version=API_VERSION, + openapi_url=f"/api/{API_VTAG}/openapi.json", + docs_url="/dev/doc", + redoc_url=None, # default disabled + ) + # STATE + app.state.settings = settings + assert app.state.settings.API_VERSION == API_VERSION # nosec + + # PLUGINS SETUP + setup_api_routes(app) + + if app.state.settings.DIRECTOR_TRACING: + setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + + # ERROR HANDLERS + 
+ # EVENTS + async def _on_startup() -> None: + print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 + + async def _on_shutdown() -> None: + print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app diff --git a/services/director/src/simcore_service_director/core/errors.py b/services/director/src/simcore_service_director/core/errors.py new file mode 100644 index 00000000000..2664f266da7 --- /dev/null +++ b/services/director/src/simcore_service_director/core/errors.py @@ -0,0 +1,14 @@ +from typing import Any + +from models_library.errors_classes import OsparcErrorMixin + + +class DirectorRuntimeError(OsparcErrorMixin, RuntimeError): + def __init__(self, **ctx: Any) -> None: + super().__init__(**ctx) + + msg_template: str = "Director-v0 unexpected error" + + +class ConfigurationError(DirectorRuntimeError): + msg_template: str = "Application misconfiguration: {msg}" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py new file mode 100644 index 00000000000..e1bd33985be --- /dev/null +++ b/services/director/src/simcore_service_director/core/settings.py @@ -0,0 +1,66 @@ +from models_library.basic_types import ( + BootModeEnum, + BuildTargetEnum, + LogLevel, + PortInt, + VersionTag, +) +from pydantic import Field, PositiveInt +from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from settings_library.base import BaseCustomSettings +from settings_library.tracing import TracingSettings +from settings_library.utils_logging import MixinLoggingSettings + +from .._meta import API_VERSION, API_VTAG, APP_NAME + + +class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): + # CODE STATICS --------------------------------------------------------- + API_VERSION: str = API_VERSION + APP_NAME: str = APP_NAME + API_VTAG: VersionTag = API_VTAG + + # 
IMAGE BUILDTIME ------------------------------------------------------ + # @Makefile + SC_BUILD_DATE: str | None = None + SC_BUILD_TARGET: BuildTargetEnum | None = None + SC_VCS_REF: str | None = None + SC_VCS_URL: str | None = None + + # @Dockerfile + SC_BOOT_MODE: BootModeEnum | None = None + SC_BOOT_TARGET: BuildTargetEnum | None = None + SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( + None, + description="If a single run of the check takes longer than timeout seconds " + "then the check is considered to have failed." + "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", + ) + SC_USER_ID: int | None = None + SC_USER_NAME: str | None = None + + # RUNTIME ----------------------------------------------------------- + DIRECTOR_DEBUG: bool = Field( + default=False, description="Debug mode", env=["DIRECTOR_DEBUG", "DEBUG"] + ) + DIRECTOR_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) + + DIRECTOR_LOGLEVEL: LogLevel = Field( + LogLevel.INFO, env=["DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + ) + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( + default=False, + env=[ + "DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ], + description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", + ) + DIRECTOR_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( + default_factory=dict, + env=["DIRECTOR_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"], + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ) + DIRECTOR_TRACING: TracingSettings | None = Field( + auto_default_from_env=True, description="settings for opentelemetry tracing" + ) From 70d183ba0da85fb3284e48ec148bc57b669b30d4 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:19 +0100 Subject: [PATCH 056/201] modified meta --- services/director/src/simcore_service_director/_meta.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py index bbcd44e8945..5bf4218d678 100644 --- a/services/director/src/simcore_service_director/_meta.py +++ b/services/director/src/simcore_service_director/_meta.py @@ -4,7 +4,7 @@ from typing import Final -from models_library.basic_types import VersionStr +from models_library.basic_types import VersionStr, VersionTag from packaging.version import Version from servicelib.utils_meta import PackageInfo @@ -16,7 +16,7 @@ VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ APP_NAME: Final[str] = PROJECT_NAME -API_VTAG: Final[str] = info.api_prefix_path_tag +API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() From fd49194e51373f5fafc4f024062d92192e221608 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:22:37 +0100 Subject: [PATCH 057/201] added cli --- .../director/src/simcore_service_director/cli.py | 16 ++++++++++++++++ 1 file 
changed, 16 insertions(+) create mode 100644 services/director/src/simcore_service_director/cli.py diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py new file mode 100644 index 00000000000..4b6beb2a800 --- /dev/null +++ b/services/director/src/simcore_service_director/cli.py @@ -0,0 +1,16 @@ +import logging + +import typer +from settings_library.utils_cli import create_settings_command, create_version_callback + +from ._meta import PROJECT_NAME, __version__ +from .core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +main = typer.Typer(name=PROJECT_NAME) + +main.command()( + create_settings_command(settings_cls=ApplicationSettings, logger=_logger) +) +main.callback()(create_version_callback(__version__)) From a55861b10f9bce11ae4d2642fb594692599fffcc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:24:26 +0100 Subject: [PATCH 058/201] replaced main --- .../src/simcore_service_director/main.py | 53 ++++++------------- .../src/simcore_service_director/main_old.py | 41 ++++++++++++++ 2 files changed, 58 insertions(+), 36 deletions(-) create mode 100644 services/director/src/simcore_service_director/main_old.py diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 0bf6edccc57..173c3e6c9c7 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,42 +1,23 @@ -#!/usr/bin/env python3 -import logging - -from aiohttp import web - -# NOTE: notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 -# pylint: disable=no-name-in-module -from servicelib.client_session import persistent_client_session -from simcore_service_director import registry_cache_task, resources -from simcore_service_director.monitoring import setup_app_monitoring -from 
simcore_service_director.rest import routing - -from .registry_proxy import setup_registry - -log = logging.getLogger(__name__) - +"""Main application to be deployed by uvicorn (or equivalent) server -def setup_app() -> web.Application: - api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) - app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) +""" - # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed - app.cleanup_ctx.append(persistent_client_session) - app.cleanup_ctx.append(setup_registry) - - registry_cache_task.setup(app) - - setup_app_monitoring(app, "simcore_service_director") - - # NOTE: removed tracing from director. Users old version of servicelib and - # in any case this service will be completely replaced - - return app +import logging +from fastapi import FastAPI +from servicelib.logging_utils import config_all_loggers +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings -def main() -> None: - app = setup_app() - web.run_app(app, port=8080) +_the_settings = ApplicationSettings.create_from_envs() +# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 +logging.basicConfig(level=_the_settings.log_level) # NOSONAR +logging.root.setLevel(_the_settings.log_level) +config_all_loggers( + log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, +) -if __name__ == "__main__": - main() +# SINGLETON FastAPI app +the_app: FastAPI = create_app(_the_settings) diff --git a/services/director/src/simcore_service_director/main_old.py b/services/director/src/simcore_service_director/main_old.py new file mode 100644 index 00000000000..39a6c0dfc38 --- /dev/null +++ b/services/director/src/simcore_service_director/main_old.py @@ -0,0 +1,41 @@ +import logging + +from aiohttp import web + +# NOTE: 
notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 +# pylint: disable=no-name-in-module +from servicelib.client_session import persistent_client_session +from simcore_service_director import registry_cache_task, resources +from simcore_service_director.monitoring import setup_app_monitoring +from simcore_service_director.rest import routing + +from .registry_proxy import setup_registry + +log = logging.getLogger(__name__) + + +def setup_app() -> web.Application: + api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) + app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) + + # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed + app.cleanup_ctx.append(persistent_client_session) + app.cleanup_ctx.append(setup_registry) + + registry_cache_task.setup(app) + + setup_app_monitoring(app, "simcore_service_director") + + # NOTE: removed tracing from director. Users old version of servicelib and + # in any case this service will be completely replaced + + return app + + +def main() -> None: + app = setup_app() + web.run_app(app, port=8080) + + +if __name__ == "__main__": + main() From 63b84d561e8693d3736d1522636a46646f5a95ae Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:15:30 +0100 Subject: [PATCH 059/201] moved config to settings/constants --- .../src/simcore_service_director/constants.py | 19 +++ .../simcore_service_director/core/settings.py | 113 +++++++++++++++++- 2 files changed, 131 insertions(+), 1 deletion(-) create mode 100644 services/director/src/simcore_service_director/constants.py diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py new file mode 100644 index 00000000000..8ee614e5f8e --- /dev/null +++ b/services/director/src/simcore_service_director/constants.py @@ -0,0 +1,19 @@ +from typing import 
Final + +SERVICE_RUNTIME_SETTINGS: Final[str] = "simcore.service.settings" +SERVICE_REVERSE_PROXY_SETTINGS: Final[str] = "simcore.service.reverse-proxy-settings" +SERVICE_RUNTIME_BOOTSETTINGS: Final[str] = "simcore.service.bootsettings" + +ORG_LABELS_TO_SCHEMA_LABELS: Final[dict[str, str]] = { + "org.label-schema.build-date": "build_date", + "org.label-schema.vcs-ref": "vcs_ref", + "org.label-schema.vcs-url": "vcs_url", +} + + +CPU_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_NANO_CPUS_LIMIT" +MEM_RESOURCE_LIMIT_KEY: Final[str] = "SIMCORE_MEMORY_BYTES_LIMIT" + +APP_REGISTRY_CACHE_DATA_KEY: Final[str] = __name__ + "_registry_cache_data" + +API_ROOT: Final[str] = "api" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index e1bd33985be..4d288431c2c 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -1,3 +1,6 @@ +import datetime +import warnings + from models_library.basic_types import ( BootModeEnum, BuildTargetEnum, @@ -5,13 +8,16 @@ PortInt, VersionTag, ) -from pydantic import Field, PositiveInt +from pydantic import AnyUrl, ByteSize, Field, PositiveInt, parse_obj_as, validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings +from settings_library.docker_registry import RegistrySettings +from settings_library.postgres import PostgresSettings from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings from .._meta import API_VERSION, API_VTAG, APP_NAME +from ..constants import API_ROOT class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): @@ -64,3 +70,108 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_TRACING: TracingSettings | None = Field( auto_default_from_env=True, description="settings for 
opentelemetry tracing" ) + + # migrated settings + DIRECTOR_DEFAULT_MAX_NANO_CPUS: int = Field( + default=1 * pow(10, 9), + env=["DIRECTOR_DEFAULT_MAX_NANO_CPUS", "DEFAULT_MAX_NANO_CPUS"], + ) + DIRECTOR_DEFAULT_MAX_MEMORY: int = Field( + default=parse_obj_as(ByteSize, "2GiB"), + env=["DIRECTOR_DEFAULT_MAX_MEMORY", "DEFAULT_MAX_MEMORY"], + ) + DIRECTOR_REGISTRY_CACHING: bool = Field( + default=True, description="cache the docker registry internally" + ) + DIRECTOR_REGISTRY_CACHING_TTL: datetime.timedelta = Field( + default=datetime.timedelta(minutes=15), + description="cache time to live value (defaults to 15 minutes)", + ) + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str = "" + + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ + str, str + ] = Field(default_factory=dict) + DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: str = "" + DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: str = "" + DIRECTOR_SELF_SIGNED_SSL_FILENAME: str = "" + + DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = 10 + DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = 12 + + DIRECTOR_TRAEFIK_SIMCORE_ZONE: str = Field( + default="internal_simcore_stack", + env=["DIRECTOR_TRAEFIK_SIMCORE_ZONE", "TRAEFIK_SIMCORE_ZONE"], + ) + + DIRECTOR_REGISTRY: RegistrySettings = Field( + auto_default_from_env=True, + description="settings for the private registry deployed with the platform", + ) + + DIRECTOR_EXTRA_HOSTS_SUFFIX: str = Field( + default="undefined", env=["DIRECTOR_EXTRA_HOSTS_SUFFIX", "EXTRA_HOSTS_SUFFIX"] + ) + + DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) + STORAGE_ENDPOINT: AnyUrl = Field(...) 
+ + # TODO: this needs some code changes + # SERVICES_DEFAULT_ENVS: dict[str, str] = { + # "POSTGRES_ENDPOINT": os.environ.get( + # "POSTGRES_ENDPOINT", "undefined postgres endpoint" + # ), + # "POSTGRES_USER": os.environ.get("POSTGRES_USER", "undefined postgres user"), + # "POSTGRES_PASSWORD": os.environ.get( + # "POSTGRES_PASSWORD", "undefined postgres password" + # ), + # "POSTGRES_DB": os.environ.get("POSTGRES_DB", "undefined postgres db"), + # "STORAGE_ENDPOINT": os.environ.get( + # "STORAGE_ENDPOINT", "undefined storage endpoint" + # ), + # } + + DIRECTOR_PUBLISHED_HOST_NAME: str = Field( + default="", env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] + ) + + DIRECTOR_SWARM_STACK_NAME: str = Field( + default="undefined-please-check", + env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], + ) + + # used when in devel mode vs release mode + DIRECTOR_NODE_SCHEMA_LOCATION: str = Field( + default=f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json", + env=["DIRECTOR_NODE_SCHEMA_LOCATION", "NODE_SCHEMA_LOCATION"], + ) + # used to find the right network name + DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( + default=None, + env=["DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME", "SIMCORE_SERVICES_NETWORK_NAME"], + ) + # useful when developing with an alternative registry namespace + DIRECTOR_SIMCORE_SERVICES_PREFIX: str = Field( + default="simcore/services", + env=["DIRECTOR_SIMCORE_SERVICES_PREFIX", "SIMCORE_SERVICES_PREFIX"], + ) + + DIRECTOR_MONITORING_ENABLED: bool = Field( + default=False, env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] + ) + + @validator("DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") + @classmethod + def _validate_substitutions(cls, v): + if v: + warnings.warn( # noqa: B028 + "Generic resources will be replaced by the following " + f"placement constraints {v}. 
This is a workaround " + "for https://github.com/moby/swarmkit/pull/3162", + UserWarning, + ) + if len(v) != len(set(v.values())): + msg = f"Dictionary values must be unique, provided: {v}" + raise ValueError(msg) + + return v From 5380ab441e9aeed60e782c0de0d69ced367a6cf3 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:31:51 +0100 Subject: [PATCH 060/201] conftest --- services/director/tests/conftest.py | 117 ++++++++++++++++------------ 1 file changed, 66 insertions(+), 51 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index eecb693e0de..f59bb2b5fe3 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -8,75 +8,77 @@ import pytest import simcore_service_director -from aiohttp import ClientSession +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director import config, resources -# NOTE: that all the changes in these pytest-plugins MUST by py3.6 compatible! 
pytest_plugins = [ - "fixtures.fake_services", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", + "pytest_simcore.fakers_projects_data", + "pytest_simcore.fakers_users_data", "pytest_simcore.repository_paths", - "pytest_simcore.pytest_global_environs", ] -@pytest.fixture -def configure_swarm_stack_name(): - config.SWARM_STACK_NAME = "test_stack" +def pytest_addoption(parser): + parser.addoption("--registry_url", action="store", default="default url") + parser.addoption("--registry_user", action="store", default="default user") + parser.addoption("--registry_pw", action="store", default="default pw") @pytest.fixture(scope="session") -def common_schemas_specs_dir(osparc_simcore_root_dir): - specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" - assert specs_dir.exists() - return specs_dir +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "director" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_director")) + return service_folder @pytest.fixture(scope="session") -def package_dir(): +def installed_package_dir() -> Path: dirpath = Path(simcore_service_director.__file__).resolve().parent assert dirpath.exists() return dirpath +@pytest.fixture(scope="session") +def common_schemas_specs_dir(osparc_simcore_root_dir: Path): + specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" + assert specs_dir.exists() + return specs_dir + + @pytest.fixture -def configure_schemas_location(package_dir, common_schemas_specs_dir): +def configure_schemas_location( + installed_package_dir: Path, common_schemas_specs_dir: Path +): config.NODE_SCHEMA_LOCATION = str( common_schemas_specs_dir / "node-meta-v0.0.1.json" ) resources.RESOURCE_NODE_SCHEMA = os.path.relpath( - config.NODE_SCHEMA_LOCATION, package_dir 
+ config.NODE_SCHEMA_LOCATION, installed_package_dir ) +@pytest.fixture(scope="session") +def configure_swarm_stack_name(): + config.SWARM_STACK_NAME = "test_stack" + + @pytest.fixture -def configure_registry_access(docker_registry): +def configure_registry_access(docker_registry: str): config.REGISTRY_URL = docker_registry config.REGISTRY_PATH = docker_registry config.REGISTRY_SSL = False config.DIRECTOR_REGISTRY_CACHING = False -@pytest.fixture -def user_id(): - yield "some_user_id" - - -@pytest.fixture -def project_id(): - yield "some_project_id" - - -def pytest_addoption(parser): - parser.addoption("--registry_url", action="store", default="default url") - parser.addoption("--registry_user", action="store", default="default user") - parser.addoption("--registry_pw", action="store", default="default pw") - - @pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig): +def configure_custom_registry(pytestconfig: pytest.Config): # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password config.REGISTRY_URL = pytestconfig.getoption("registry_url") @@ -87,29 +89,42 @@ def configure_custom_registry(pytestconfig): @pytest.fixture -async def aiohttp_mock_app(loop, mocker): - print("client session started ...") - session = ClientSession() +def api_version_prefix() -> str: + assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) + return "v0" - mock_app_storage = { - config.APP_CLIENT_SESSION_KEY: session, - config.APP_REGISTRY_CACHE_DATA_KEY: {}, - } - def _get_item(self, key): - return mock_app_storage[key] +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_environment_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **docker_compose_service_environment_dict, + # ADD here env-var overrides + }, + ) - aiohttp_app = mocker.patch("aiohttp.web.Application") - aiohttp_app.__getitem__ = _get_item - yield 
aiohttp_app +# @pytest.fixture +# async def aiohttp_mock_app(loop, mocker): +# print("client session started ...") +# session = ClientSession() - # cleanup session - await session.close() - print("client session closed") +# mock_app_storage = { +# config.APP_CLIENT_SESSION_KEY: session, +# config.APP_REGISTRY_CACHE_DATA_KEY: {}, +# } +# def _get_item(self, key): +# return mock_app_storage[key] -@pytest.fixture -def api_version_prefix() -> str: - assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - return "v0" +# aiohttp_app = mocker.patch("aiohttp.web.Application") +# aiohttp_app.__getitem__ = _get_item + +# yield aiohttp_app + +# # cleanup session +# await session.close() From 9682c3b51b85fb8f05cbc2df522c8d67605dd0e8 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:33:43 +0100 Subject: [PATCH 061/201] ruffed --- .../src/simcore_service_director/config.py | 13 ++++---- .../director/tests/fixtures/fake_services.py | 6 ++-- services/director/tests/test_docker_utils.py | 6 ++-- .../director/tests/test_dummy_services.py | 2 +- services/director/tests/test_handlers.py | 31 +++++++------------ services/director/tests/test_json_schemas.py | 1 - services/director/tests/test_oas.py | 1 - .../director/tests/test_openapi_schemas.py | 20 +++++------- services/director/tests/test_producer.py | 4 +-- .../tests/test_registry_cache_task.py | 2 +- .../director/tests/test_registry_proxy.py | 18 +++++------ services/director/tests/test_utils.py | 4 +-- 12 files changed, 45 insertions(+), 63 deletions(-) diff --git a/services/director/src/simcore_service_director/config.py b/services/director/src/simcore_service_director/config.py index 67a15cb05ac..090a506adb0 100644 --- a/services/director/src/simcore_service_director/config.py +++ b/services/director/src/simcore_service_director/config.py @@ -6,9 +6,8 @@ import os import warnings from distutils.util import strtobool -from typing import Dict, 
Optional -from servicelib.client_session import ( # pylint: disable=no-name-in-module +from servicelib.aiohttp.application_keys import ( # pylint: disable=no-name-in-module APP_CLIENT_SESSION_KEY, ) @@ -63,11 +62,11 @@ def _from_env_with_default(env: str, python_type, default): ) -def _parse_placement_substitutions() -> Dict[str, str]: +def _parse_placement_substitutions() -> dict[str, str]: str_env_var: str = os.environ.get( "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", "{}" ) - result: Dict[str, str] = json.loads(str_env_var) + result: dict[str, str] = json.loads(str_env_var) if len(result) > 0: warnings.warn( # noqa: B028 @@ -83,7 +82,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: return result -DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: Dict[ +DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ str, str ] = _parse_placement_substitutions() @@ -126,7 +125,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: EXTRA_HOSTS_SUFFIX: str = os.environ.get("EXTRA_HOSTS_SUFFIX", "undefined") # these are the envs passed to the dynamic services by default -SERVICES_DEFAULT_ENVS: Dict[str, str] = { +SERVICES_DEFAULT_ENVS: dict[str, str] = { "POSTGRES_ENDPOINT": os.environ.get( "POSTGRES_ENDPOINT", "undefined postgres endpoint" ), @@ -151,7 +150,7 @@ def _parse_placement_substitutions() -> Dict[str, str]: "NODE_SCHEMA_LOCATION", f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json" ) # used to find the right network name -SIMCORE_SERVICES_NETWORK_NAME: Optional[str] = os.environ.get( +SIMCORE_SERVICES_NETWORK_NAME: str | None = os.environ.get( "SIMCORE_SERVICES_NETWORK_NAME" ) # useful when developing with an alternative registry namespace diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index e58f547f729..b709e422271 100644 --- a/services/director/tests/fixtures/fake_services.py +++ 
b/services/director/tests/fixtures/fake_services.py @@ -19,7 +19,7 @@ _logger = logging.getLogger(__name__) -@pytest.fixture(scope="function") +@pytest.fixture() def push_services(docker_registry, tmpdir): registry_url = docker_registry tmp_dir = Path(tmpdir) @@ -50,7 +50,7 @@ async def build_push_images( images_to_build = [] - for image_index in range(0, number_of_computational_services): + for image_index in range(number_of_computational_services): images_to_build.append( _build_push_image( tmp_dir, @@ -63,7 +63,7 @@ async def build_push_images( ) ) - for image_index in range(0, number_of_interactive_services): + for image_index in range(number_of_interactive_services): images_to_build.append( _build_push_image( tmp_dir, diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/test_docker_utils.py index f6cce146e4b..76b4abd2051 100644 --- a/services/director/tests/test_docker_utils.py +++ b/services/director/tests/test_docker_utils.py @@ -50,11 +50,11 @@ async def test_swarm_get_number_nodes(docker_swarm): async def test_swarm_has_manager_nodes(docker_swarm): - assert (await docker_utils.swarm_has_manager_nodes()) == True + assert (await docker_utils.swarm_has_manager_nodes()) is True async def test_swarm_has_worker_nodes(docker_swarm): - assert (await docker_utils.swarm_has_worker_nodes()) == False + assert (await docker_utils.swarm_has_worker_nodes()) is False async def test_push_services( @@ -62,6 +62,6 @@ async def test_push_services( configure_registry_access, configure_schemas_location, ): - images = await push_services( + await push_services( number_of_computational_services=3, number_of_interactive_services=3 ) diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py index f38cb848b22..5427fd178c7 100644 --- a/services/director/tests/test_dummy_services.py +++ b/services/director/tests/test_dummy_services.py @@ -3,9 +3,9 @@ # pylint: disable=bare-except # 
pylint:disable=redefined-outer-name -import pytest import json import logging + from helpers import json_schema_validator log = logging.getLogger(__name__) diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 4d981ede990..3a326ef6ba2 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -7,7 +7,6 @@ import json import uuid -from typing import Optional from urllib.parse import quote import pytest @@ -29,8 +28,7 @@ def client( ): app = main.setup_app() server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - client = loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - return client + return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) async def test_root_get(client, api_version_prefix): @@ -58,7 +56,7 @@ def _check_services(created_services, services, schema_version="v1"): ] json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() == True + assert json_schema_path.exists() is True with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) @@ -100,7 +98,7 @@ async def test_services_get(docker_registry, client, push_services, api_version_ assert web_response.status == 400 assert web_response.content_type == "application/json" services_enveloped = await web_response.json() - assert not "data" in services_enveloped + assert "data" not in services_enveloped assert "error" in services_enveloped web_response = await client.get( @@ -147,9 +145,9 @@ async def test_services_by_key_version_get( for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/services/{key}/{version}" web_response = await client.get(url) @@ -174,9 +172,9 @@ async def test_get_service_labels( for service in created_services: service_description = service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/services/{key}/{version}/labels" web_response = await client.get(url) assert web_response.status == 200, await web_response.text() @@ -209,9 +207,9 @@ async def test_services_extras_by_key_version_get( for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! - key, version = [ + key, version = ( quote(service_description[key], safe="") for key in ("key", "version") - ] + ) url = f"/{api_version_prefix}/service_extras/{key}/{version}" web_response = await client.get(url) @@ -232,7 +230,7 @@ async def _start_get_stop_services( user_id, project_id, api_version_prefix: str, - save_state: Optional[bool], + save_state: bool | None, expected_save_state_call: bool, mocker, ): @@ -422,7 +420,7 @@ async def test_running_services_post_and_delete( user_id, project_id, api_version_prefix, - save_state: Optional[bool], + save_state: bool | None, expected_save_state_call: bool, mocker, ): @@ -536,10 +534,5 @@ async def test_performance_get_services( print("iteration completed in", (time.perf_counter() - start_time_i), "s") stop_time = time.perf_counter() print( - "Time to run {} times: {}s, #services {}, time per call {}s/service".format( - number_of_calls, - stop_time - start_time, - number_of_services, - (stop_time - start_time) / number_of_calls / number_of_services, - ) + f"Time to run {number_of_calls} 
times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" ) diff --git a/services/director/tests/test_json_schemas.py b/services/director/tests/test_json_schemas.py index 6a45b1d0740..37d68c62f09 100644 --- a/services/director/tests/test_json_schemas.py +++ b/services/director/tests/test_json_schemas.py @@ -3,7 +3,6 @@ import pytest from jsonschema import SchemaError, ValidationError, validate - from simcore_service_director import resources API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) diff --git a/services/director/tests/test_oas.py b/services/director/tests/test_oas.py index 86898604fa4..c62080a2391 100644 --- a/services/director/tests/test_oas.py +++ b/services/director/tests/test_oas.py @@ -4,7 +4,6 @@ import yaml from openapi_spec_validator import validate_spec from openapi_spec_validator.exceptions import OpenAPIValidationError - from simcore_service_director import resources diff --git a/services/director/tests/test_openapi_schemas.py b/services/director/tests/test_openapi_schemas.py index 7849534fbcf..712ce3510ac 100644 --- a/services/director/tests/test_openapi_schemas.py +++ b/services/director/tests/test_openapi_schemas.py @@ -2,10 +2,8 @@ import pytest import yaml - from openapi_spec_validator import validate_spec from openapi_spec_validator.exceptions import OpenAPIValidationError - from simcore_service_director import resources API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) @@ -15,19 +13,17 @@ def correct_schema_local_references(schema_specs): for key, value in schema_specs.items(): if isinstance(value, dict): correct_schema_local_references(value) - elif "$ref" in key: - if str(value).startswith("#/"): - # correct the reference - new_value = str(value).replace("#/", "#/components/schemas/") - schema_specs[key] = new_value + elif "$ref" in key and str(value).startswith("#/"): + # correct the reference + new_value = 
str(value).replace("#/", "#/components/schemas/") + schema_specs[key] = new_value def add_namespace_for_converted_schemas(schema_specs): # schemas converted from jsonschema do not have an overarching namespace. # the openapi validator does not like this # we use the jsonschema title to create a fake namespace - fake_schema_specs = {"FakeName": schema_specs} - return fake_schema_specs + return {"FakeName": schema_specs} def validate_individual_schemas(list_of_paths): @@ -44,7 +40,7 @@ def validate_individual_schemas(list_of_paths): for spec_file_path in list_of_paths: assert spec_file_path.exists() # only consider schemas - if not "openapi.yaml" in str(spec_file_path.name) and "schemas" in str( + if "openapi.yaml" not in str(spec_file_path.name) and "schemas" in str( spec_file_path ): with spec_file_path.open() as file_ptr: @@ -62,9 +58,7 @@ def validate_individual_schemas(list_of_paths): @pytest.mark.parametrize("version", API_VERSIONS) def test_valid_individual_openapi_schemas_specs(version): - name = "{root}/{version}/schemas".format( - root=resources.RESOURCE_OPENAPI_ROOT, version=version - ) + name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" schemas_folder_path = resources.get_path(name) validate_individual_schemas(Path(schemas_folder_path).rglob("*.yaml")) validate_individual_schemas(Path(schemas_folder_path).rglob("*.yml")) diff --git a/services/director/tests/test_producer.py b/services/director/tests/test_producer.py index e8fcc4a6fdb..620e624a663 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/test_producer.py @@ -6,8 +6,8 @@ import json import uuid +from collections.abc import Callable from dataclasses import dataclass -from typing import Callable import docker import pytest @@ -158,7 +158,7 @@ async def test_find_service_tag(): ) assert latest_version == "2.11.0" # get a specific version - version = await producer._find_service_tag(list_of_images, my_service_key, "1.2.3") + await 
producer._find_service_tag(list_of_images, my_service_key, "1.2.3") async def test_start_stop_service(docker_network, run_services): diff --git a/services/director/tests/test_registry_cache_task.py b/services/director/tests/test_registry_cache_task.py index 056462b9199..e0272798204 100644 --- a/services/director/tests/test_registry_cache_task.py +++ b/services/director/tests/test_registry_cache_task.py @@ -22,7 +22,7 @@ def client( registry_cache_task.setup(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) + return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) async def test_registry_caching_task(client, push_services): diff --git a/services/director/tests/test_registry_proxy.py b/services/director/tests/test_registry_proxy.py index dec9ee43708..f59cb2c428f 100644 --- a/services/director/tests/test_registry_proxy.py +++ b/services/director/tests/test_registry_proxy.py @@ -5,7 +5,6 @@ import time import pytest - from simcore_service_director import config, registry_proxy @@ -133,10 +132,12 @@ async def test_list_interactive_service_dependencies( docker_dependencies = json.loads( docker_labels["simcore.service.dependencies"] ) - image_dependencies = await registry_proxy.list_interactive_service_dependencies( - aiohttp_mock_app, - service_description["key"], - service_description["version"], + image_dependencies = ( + await registry_proxy.list_interactive_service_dependencies( + aiohttp_mock_app, + service_description["key"], + service_description["version"], + ) ) assert isinstance(image_dependencies, list) assert len(image_dependencies) == len(docker_dependencies) @@ -180,6 +181,7 @@ async def test_get_image_labels( assert image_manifest_digest not in images_digests images_digests.add(image_manifest_digest) + def test_get_service_first_name(): repo = "simcore/services/dynamic/myservice/modeler/my-sub-modeler" assert registry_proxy.get_service_first_name(repo) == "myservice" @@ -273,11 +275,7 @@ async 
def test_get_services_performance( ) stop_time = time.perf_counter() print( - "\nTime to run getting services: {}s, #services {}, time per call {}s/service".format( - stop_time - start_time, - len(services), - (stop_time - start_time) / len(services), - ) + f"\nTime to run getting services: {stop_time - start_time}s, #services {len(services)}, time per call {(stop_time - start_time) / len(services)}s/service" ) diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 3141d2f2baa..8eeb7b410bd 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -6,7 +6,7 @@ @pytest.mark.parametrize( "timestr", - ( + [ # Samples taken from https://docs.docker.com/engine/reference/commandline/service_inspect/ "2020-10-09T18:44:02.558012087Z", "2020-10-09T12:28:14.771034099Z", @@ -14,7 +14,7 @@ # found cases with spaces "2020-10-09T12:28:14.77 Z", " 2020-10-09T12:28:14.77 ", - ), + ], ) def test_parse_valid_time_strings(timestr): From 98c696574bab33771e997c218744542769745411 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:37:58 +0100 Subject: [PATCH 062/201] aync tests --- services/director/setup.cfg | 11 +++++++++++ services/director/tests/conftest.py | 4 ++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/services/director/setup.cfg b/services/director/setup.cfg index 1eb089c0af8..eb3d7554b27 100644 --- a/services/director/setup.cfg +++ b/services/director/setup.cfg @@ -6,3 +6,14 @@ tag = False commit_args = --no-verify [bumpversion:file:VERSION] + + +[tool:pytest] +asyncio_mode = auto +markers = + testit: "marks test to run during development" + + +[mypy] +plugins = + pydantic.mypy diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index f59bb2b5fe3..d0275e85a03 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -17,8 +17,8 @@ 
"pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", - "pytest_simcore.fakers_projects_data", - "pytest_simcore.fakers_users_data", + "pytest_simcore.faker_projects_data", + "pytest_simcore.faker_users_data", "pytest_simcore.repository_paths", ] From de680de82b630d7373cb39160f315f2e8e75d0a6 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:53:16 +0100 Subject: [PATCH 063/201] test_docker_utils runs --- .../simcore_service_director/docker_utils.py | 19 ++++++++----------- services/director/tests/conftest.py | 5 +++-- .../director/tests/fixtures/fake_services.py | 11 ++++++++--- services/director/tests/test_docker_utils.py | 15 ++++++++------- 4 files changed, 27 insertions(+), 23 deletions(-) diff --git a/services/director/src/simcore_service_director/docker_utils.py b/services/director/src/simcore_service_director/docker_utils.py index 56dfba1bc3a..0c6b8a1a363 100644 --- a/services/director/src/simcore_service_director/docker_utils.py +++ b/services/director/src/simcore_service_director/docker_utils.py @@ -1,18 +1,19 @@ import logging +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager import aiodocker -from asyncio_extras import async_contextmanager -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -@async_contextmanager -async def docker_client() -> aiodocker.docker.Docker: +@asynccontextmanager +async def docker_client() -> AsyncIterator[aiodocker.docker.Docker]: try: client = aiodocker.Docker() yield client except aiodocker.exceptions.DockerError: - log.exception(msg="Unexpected error with docker client") + _logger.exception(msg="Unexpected error with docker client") raise finally: await client.close() @@ -27,14 +28,10 @@ async def swarm_get_number_nodes() -> int: async def swarm_has_manager_nodes() -> bool: async with docker_client() as client: # pylint: 
disable=not-async-context-manager nodes = await client.nodes.list(filters={"role": "manager"}) - if nodes: - return True - return False + return bool(nodes) async def swarm_has_worker_nodes() -> bool: async with docker_client() as client: # pylint: disable=not-async-context-manager nodes = await client.nodes.list(filters={"role": "worker"}) - if nodes: - return True - return False + return bool(nodes) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index d0275e85a03..6ef70c76e00 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -13,6 +13,7 @@ from simcore_service_director import config, resources pytest_plugins = [ + "fixtures.fake_services", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", @@ -70,7 +71,7 @@ def configure_swarm_stack_name(): @pytest.fixture -def configure_registry_access(docker_registry: str): +def configure_registry_access(docker_registry: str) -> None: config.REGISTRY_URL = docker_registry config.REGISTRY_PATH = docker_registry config.REGISTRY_SSL = False @@ -78,7 +79,7 @@ def configure_registry_access(docker_registry: str): @pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig: pytest.Config): +def configure_custom_registry(pytestconfig: pytest.Config) -> None: # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password config.REGISTRY_URL = pytestconfig.getoption("registry_url") diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index b709e422271..f2954c3c469 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -84,6 +84,7 @@ async def build_push_images( return list_of_pushed_images_tags yield build_push_images + _logger.info("clean registry") _clean_registry(registry_url, list_of_pushed_images_tags) 
_clean_registry(registry_url, dependent_images) @@ -185,7 +186,7 @@ def _clean_registry(registry_url, list_of_images): url = "http://{host}/v2/{name}/manifests/{tag}".format( host=registry_url, name=service_description["key"], tag=tag ) - response = requests.get(url, headers=request_headers) + response = requests.get(url, headers=request_headers, timeout=10) docker_content_digest = response.headers["Docker-Content-Digest"] # remove the image from the registry url = "http://{host}/v2/{name}/manifests/{digest}".format( @@ -193,7 +194,7 @@ def _clean_registry(registry_url, list_of_images): name=service_description["key"], digest=docker_content_digest, ) - response = requests.delete(url, headers=request_headers) + response = requests.delete(url, headers=request_headers, timeout=5) async def _create_base_image(labels, tag): @@ -223,7 +224,11 @@ def _create_service_description(service_type, name, tag): service_key_type = "comp" elif service_type == "dynamic": service_key_type = "dynamic" - service_desc["key"] = "simcore/services/" + service_key_type + "/" + name + else: + msg = f"Invalid {service_type=}" + raise ValueError(msg) + + service_desc["key"] = f"simcore/services/{service_key_type}/{name}" service_desc["version"] = tag service_desc["type"] = service_type diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/test_docker_utils.py index 76b4abd2051..3c9180f88cb 100644 --- a/services/director/tests/test_docker_utils.py +++ b/services/director/tests/test_docker_utils.py @@ -4,6 +4,7 @@ # pylint:disable=too-many-arguments # pylint: disable=not-async-context-manager from asyncio import sleep +from collections.abc import Callable import pytest from aiodocker.exceptions import DockerError @@ -37,30 +38,30 @@ async def test_docker_client(): (docker_utils.swarm_has_worker_nodes), ], ) -async def test_swarm_method_with_no_swarm(fct): +async def test_swarm_method_with_no_swarm(fct: Callable): # if this fails on your development machine run # 
`docker swarm leave --force` to leave the swarm with pytest.raises(DockerError): await fct() -async def test_swarm_get_number_nodes(docker_swarm): +async def test_swarm_get_number_nodes(docker_swarm: None): num_nodes = await docker_utils.swarm_get_number_nodes() assert num_nodes == 1 -async def test_swarm_has_manager_nodes(docker_swarm): +async def test_swarm_has_manager_nodes(docker_swarm: None): assert (await docker_utils.swarm_has_manager_nodes()) is True -async def test_swarm_has_worker_nodes(docker_swarm): +async def test_swarm_has_worker_nodes(docker_swarm: None): assert (await docker_utils.swarm_has_worker_nodes()) is False async def test_push_services( - push_services, - configure_registry_access, - configure_schemas_location, + push_services: Callable, + configure_registry_access: None, + configure_schemas_location: None, ): await push_services( number_of_computational_services=3, number_of_interactive_services=3 From c4a2e947c7e6952a5a8d637a36a2252a10f7db9e Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:56:59 +0100 Subject: [PATCH 064/201] test_settings --- services/director/tests/test_core_settings.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 services/director/tests/test_core_settings.py diff --git a/services/director/tests/test_core_settings.py b/services/director/tests/test_core_settings.py new file mode 100644 index 00000000000..24b07909702 --- /dev/null +++ b/services/director/tests/test_core_settings.py @@ -0,0 +1,24 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director.core.settings import ApplicationSettings + + +def test_valid_web_application_settings(app_environment: EnvVarsDict): + """ + We validate actual envfiles (e.g. 
repo.config files) by passing them via the CLI + + $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets + $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + + """ + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() + + assert app_environment["DIRECTOR_DEBUG"] == settings.DIRECTOR_DEBUG From 6f72c0fe0d4b649a813eb5c60d4746e8f39ec204 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 14:58:42 +0100 Subject: [PATCH 065/201] test_utils --- services/director/tests/test_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 8eeb7b410bd..c9126ca0a4d 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -1,5 +1,6 @@ from datetime import datetime +import arrow import pytest from simcore_service_director.utils import parse_as_datetime @@ -26,7 +27,7 @@ def test_parse_valid_time_strings(timestr): def test_parse_invalid_timestr(): - now = datetime.utcnow() + now = arrow.utcnow().datetime invalid_timestr = "2020-10-09T12:28" # w/ default, it should NOT raise From 53097ffbaa7644d15c5cc2429883a8e50b696a91 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:00:02 +0100 Subject: [PATCH 066/201] minor --- services/director/tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 6ef70c76e00..f6aefaf293c 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -47,7 +47,7 @@ def installed_package_dir() -> Path: @pytest.fixture(scope="session") -def common_schemas_specs_dir(osparc_simcore_root_dir: Path): +def 
common_schemas_specs_dir(osparc_simcore_root_dir: Path) -> Path: specs_dir = osparc_simcore_root_dir / "api" / "specs" / "director" / "schemas" assert specs_dir.exists() return specs_dir @@ -56,7 +56,7 @@ def common_schemas_specs_dir(osparc_simcore_root_dir: Path): @pytest.fixture def configure_schemas_location( installed_package_dir: Path, common_schemas_specs_dir: Path -): +) -> None: config.NODE_SCHEMA_LOCATION = str( common_schemas_specs_dir / "node-meta-v0.0.1.json" ) @@ -66,7 +66,7 @@ def configure_schemas_location( @pytest.fixture(scope="session") -def configure_swarm_stack_name(): +def configure_swarm_stack_name() -> None: config.SWARM_STACK_NAME = "test_stack" From 39dd2bff1ada868c1795ce6ea5ece8a18d5e497d Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:02:06 +0100 Subject: [PATCH 067/201] test_oas --- services/director/tests/conftest.py | 6 +++--- services/director/tests/test_oas.py | 17 ++++------------- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index f6aefaf293c..458422c658a 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -1,7 +1,7 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except # pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import os from pathlib import Path diff --git a/services/director/tests/test_oas.py b/services/director/tests/test_oas.py index c62080a2391..186f3819737 100644 --- a/services/director/tests/test_oas.py +++ b/services/director/tests/test_oas.py @@ -1,22 +1,13 @@ # pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + -import pytest import yaml -from openapi_spec_validator import 
validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError from simcore_service_director import resources -def test_openapi_specs(): - openapi_path = resources.get_path(resources.RESOURCE_OPEN_API) - with resources.stream(resources.RESOURCE_OPEN_API) as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=openapi_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) - - def test_server_specs(): with resources.stream(resources.RESOURCE_OPEN_API) as fh: specs = yaml.safe_load(fh) From 9b8b5ac07256b2e02cebe236ffb1c87c4cd0d41c Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:02:52 +0100 Subject: [PATCH 068/201] oas is auto-generated now --- .../director/tests/test_dummy_services.py | 4 +- services/director/tests/test_openapi.py | 25 -------- .../director/tests/test_openapi_schemas.py | 64 ------------------- 3 files changed, 2 insertions(+), 91 deletions(-) delete mode 100644 services/director/tests/test_openapi.py delete mode 100644 services/director/tests/test_openapi_schemas.py diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py index 5427fd178c7..80e142c4601 100644 --- a/services/director/tests/test_dummy_services.py +++ b/services/director/tests/test_dummy_services.py @@ -1,7 +1,7 @@ +# pylint: disable=bare-except +# pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-import -# pylint: disable=bare-except -# pylint:disable=redefined-outer-name import json import logging diff --git a/services/director/tests/test_openapi.py b/services/director/tests/test_openapi.py deleted file mode 100644 index 36b25d16073..00000000000 --- a/services/director/tests/test_openapi.py +++ /dev/null @@ -1,25 +0,0 @@ -from pathlib import Path - -import pkg_resources -import pytest -import simcore_service_director -import yaml -from 
openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError -from simcore_service_director.resources import RESOURCE_OPEN_API - - -def test_specifications(): - # pylint: disable=no-value-for-parameter - spec_path = Path( - pkg_resources.resource_filename( - simcore_service_director.__name__, RESOURCE_OPEN_API - ) - ) - - with spec_path.open() as fh: - specs = yaml.safe_load(fh) - try: - validate_spec(specs, spec_url=spec_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) diff --git a/services/director/tests/test_openapi_schemas.py b/services/director/tests/test_openapi_schemas.py deleted file mode 100644 index 712ce3510ac..00000000000 --- a/services/director/tests/test_openapi_schemas.py +++ /dev/null @@ -1,64 +0,0 @@ -from pathlib import Path - -import pytest -import yaml -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPIValidationError -from simcore_service_director import resources - -API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - - -def correct_schema_local_references(schema_specs): - for key, value in schema_specs.items(): - if isinstance(value, dict): - correct_schema_local_references(value) - elif "$ref" in key and str(value).startswith("#/"): - # correct the reference - new_value = str(value).replace("#/", "#/components/schemas/") - schema_specs[key] = new_value - - -def add_namespace_for_converted_schemas(schema_specs): - # schemas converted from jsonschema do not have an overarching namespace. 
- # the openapi validator does not like this - # we use the jsonschema title to create a fake namespace - return {"FakeName": schema_specs} - - -def validate_individual_schemas(list_of_paths): - fake_openapi_headers = { - "openapi": "3.0.0", - "info": { - "title": "An include file to define sortable attributes", - "version": "1.0.0", - }, - "paths": {}, - "components": {"parameters": {}, "schemas": {}}, - } - - for spec_file_path in list_of_paths: - assert spec_file_path.exists() - # only consider schemas - if "openapi.yaml" not in str(spec_file_path.name) and "schemas" in str( - spec_file_path - ): - with spec_file_path.open() as file_ptr: - schema_specs = yaml.safe_load(file_ptr) - # correct local references - correct_schema_local_references(schema_specs) - if str(spec_file_path).endswith("-converted.yaml"): - schema_specs = add_namespace_for_converted_schemas(schema_specs) - fake_openapi_headers["components"]["schemas"] = schema_specs - try: - validate_spec(fake_openapi_headers, spec_url=spec_file_path.as_uri()) - except OpenAPIValidationError as err: - pytest.fail(err.message) - - -@pytest.mark.parametrize("version", API_VERSIONS) -def test_valid_individual_openapi_schemas_specs(version): - name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" - schemas_folder_path = resources.get_path(name) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yaml")) - validate_individual_schemas(Path(schemas_folder_path).rglob("*.yml")) From 9c28b0e361a70c9d4253204262b022e172c4b74d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:49:06 +0100 Subject: [PATCH 069/201] migration --- .../cache_request_decorator.py | 14 +-- .../core/application.py | 3 + .../registry_cache_task.py | 57 +++++---- .../registry_proxy.py | 110 +++++++++--------- 4 files changed, 96 insertions(+), 88 deletions(-) diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py 
b/services/director/src/simcore_service_director/cache_request_decorator.py index 431a7216e90..67844911d3f 100644 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ b/services/director/src/simcore_service_director/cache_request_decorator.py @@ -1,26 +1,26 @@ from functools import wraps -from typing import Coroutine, Dict, Tuple +from typing import Coroutine -from aiohttp import web +from fastapi import FastAPI from simcore_service_director import config -def cache_requests(func: Coroutine, no_cache: bool = False): +def cache_requests(func: Coroutine, *, no_cache: bool = False): @wraps(func) async def wrapped( - app: web.Application, url: str, method: str, *args, **kwargs - ) -> Tuple[Dict, Dict]: + app: FastAPI, url: str, method: str, *args, **kwargs + ) -> tuple[dict, dict]: is_cache_enabled = config.DIRECTOR_REGISTRY_CACHING and method == "GET" cache_key = f"{url}:{method}" if is_cache_enabled and not no_cache: - cache_data = app[config.APP_REGISTRY_CACHE_DATA_KEY] + cache_data = app.state.registry_cache if cache_key in cache_data: return cache_data[cache_key] resp_data, resp_headers = await func(app, url, method, *args, **kwargs) if is_cache_enabled and not no_cache: - cache_data = app[config.APP_REGISTRY_CACHE_DATA_KEY] + cache_data = app.state.registry_cache cache_data[cache_key] = (resp_data, resp_headers) return (resp_data, resp_headers) diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index f75f0c35336..f02d0f3d10e 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -4,6 +4,7 @@ from fastapi import FastAPI from servicelib.fastapi.tracing import setup_tracing +from .. 
import registry_cache_task from .._meta import ( API_VERSION, API_VTAG, @@ -49,6 +50,8 @@ def create_app(settings: ApplicationSettings) -> FastAPI: if app.state.settings.DIRECTOR_TRACING: setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) + registry_cache_task.setup(app) + # ERROR HANDLERS # EVENTS diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py index 10eca38b2b7..100152dfaad 100644 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ b/services/director/src/simcore_service_director/registry_cache_task.py @@ -1,22 +1,24 @@ import asyncio import logging -from typing import AsyncIterator -from aiohttp import web +from fastapi import FastAPI from servicelib.utils import logged_gather -from simcore_service_director import config, exceptions, registry_proxy -from simcore_service_director.config import APP_REGISTRY_CACHE_DATA_KEY + +from . import config, exceptions, registry_proxy +from .core.settings import ApplicationSettings _logger = logging.getLogger(__name__) TASK_NAME: str = __name__ + "_registry_caching_task" -async def registry_caching_task(app: web.Application) -> None: +async def registry_caching_task(app: FastAPI) -> None: try: _logger.info("%s: initializing cache...", TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() + assert hasattr(app.state, "registry_cache") # nosec + assert isinstance(app.state.registry_cache, dict) # nosec + app.state.registry_cache.clear() await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) _logger.info("%s: initialisation completed", TASK_NAME) while True: @@ -24,7 +26,7 @@ async def registry_caching_task(app: web.Application) -> None: try: keys = [] refresh_tasks = [] - for key in app[APP_REGISTRY_CACHE_DATA_KEY]: + for key in app.state.registry_cache: path, method = key.split(":") _logger.debug("refresh %s:%s", method, path) refresh_tasks.append( @@ -32,18 
+34,18 @@ async def registry_caching_task(app: web.Application) -> None: app, path, method, no_cache=True ) ) - keys = list(app[APP_REGISTRY_CACHE_DATA_KEY].keys()) + keys = list(app.state.registry_cache.keys()) results = await logged_gather(*refresh_tasks) - for key, result in zip(keys, results): - app[APP_REGISTRY_CACHE_DATA_KEY][key] = result + for key, result in zip(keys, results, strict=False): + app.state.registry_cache[key] = result except exceptions.DirectorException: # if the registry is temporarily not available this might happen _logger.exception( "%s: exception while refreshing cache, clean cache...", TASK_NAME ) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() + app.state.registry_cache.clear() _logger.info( "cache refreshed %s: sleeping for %ss...", @@ -57,23 +59,30 @@ async def registry_caching_task(app: web.Application) -> None: _logger.exception("%s: Unhandled exception while refreshing cache", TASK_NAME) finally: _logger.info("%s: finished task...clearing cache...", TASK_NAME) - app[APP_REGISTRY_CACHE_DATA_KEY].clear() - + app.state.registry_cache.clear() -async def setup_registry_caching_task(app: web.Application) -> AsyncIterator[None]: - app[APP_REGISTRY_CACHE_DATA_KEY] = {} - app[TASK_NAME] = asyncio.get_event_loop().create_task(registry_caching_task(app)) - yield +def setup(app: FastAPI) -> None: + async def on_startup() -> None: + app.state.registry_cache = {} + app.state.registry_cache_task = None + app_settings: ApplicationSettings = app.state.settings + if not app_settings.DIRECTOR_REGISTRY_CACHING: + _logger.info("Registry caching disabled") + return - task = app[TASK_NAME] - task.cancel() - await task + app.state.registry_cache = {} + app.state.registry_cache_task = asyncio.get_event_loop().create_task( + registry_caching_task(app) + ) + async def on_shutdown() -> None: + if app.state.registry_cache_task: + app.state.registry_cache_task.cancel() + await app.state.registry_cache_task -def setup(app: web.Application) -> None: - if 
config.DIRECTOR_REGISTRY_CACHING: - app.cleanup_ctx.append(setup_registry_caching_task) + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) -__all__ = ["setup", "APP_REGISTRY_CACHE_DATA_KEY"] +__all__ = ["setup"] diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 2c4591aefb6..d3e855a2ed7 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -6,10 +6,11 @@ import re from http import HTTPStatus from pprint import pformat -from typing import Any, AsyncIterator, Dict, List, Optional, Tuple +from typing import Any, AsyncIterator -from aiohttp import BasicAuth, ClientSession, client_exceptions, web +from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout +from fastapi import FastAPI from simcore_service_director import config, exceptions from simcore_service_director.cache_request_decorator import cache_requests from tenacity import retry @@ -39,8 +40,8 @@ class ServiceType(enum.Enum): async def _basic_auth_registry_request( - app: web.Application, path: str, method: str, **session_kwargs -) -> Tuple[Dict, Dict]: + app: FastAPI, path: str, method: str, **session_kwargs +) -> tuple[dict, dict]: if not config.REGISTRY_URL: raise exceptions.DirectorException("URL to registry is not defined") @@ -49,8 +50,8 @@ async def _basic_auth_registry_request( ) logger.debug("Requesting registry using %s", url) # try the registry with basic authentication first, spare 1 call - resp_data: Dict = {} - resp_headers: Dict = {} + resp_data: dict = {} + resp_headers: dict = {} auth = ( BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) if config.REGISTRY_AUTH and config.REGISTRY_USER and config.REGISTRY_PW @@ -87,21 +88,19 @@ async def _basic_auth_registry_request( return (resp_data, 
resp_headers) except client_exceptions.ClientError as exc: logger.exception("Unknown error while accessing registry: %s", str(exc)) - raise exceptions.DirectorException( - f"Unknown error while accessing registry: {str(exc)}" - ) + msg = f"Unknown error while accessing registry: {str(exc)}" + raise exceptions.DirectorException(msg) from exc async def _auth_registry_request( - url: URL, method: str, auth_headers: Dict, session: ClientSession, **kwargs -) -> Tuple[Dict, Dict]: + url: URL, method: str, auth_headers: dict, session: ClientSession, **kwargs +) -> tuple[dict, dict]: if not config.REGISTRY_AUTH or not config.REGISTRY_USER or not config.REGISTRY_PW: - raise exceptions.RegistryConnectionError( - "Wrong configuration: Authentication to registry is needed!" - ) + msg = "Wrong configuration: Authentication to registry is needed!" + raise exceptions.RegistryConnectionError(msg) # auth issue let's try some authentication get the auth type auth_type = None - auth_details: Dict[str, str] = {} + auth_details: dict[str, str] = {} for key in auth_headers: if str(key).lower() == "www-authenticate": auth_type, auth_value = str(auth_headers[key]).split(" ", 1) @@ -111,9 +110,8 @@ async def _auth_registry_request( } break if not auth_type: - raise exceptions.RegistryConnectionError( - "Unknown registry type: cannot deduce authentication method!" - ) + msg = "Unknown registry type: cannot deduce authentication method!" 
+ raise exceptions.RegistryConnectionError(msg) auth = BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) # bearer type, it needs a token with all communications @@ -124,13 +122,10 @@ async def _auth_registry_request( ) async with session.get(token_url, auth=auth, **kwargs) as token_resp: if not token_resp.status == HTTPStatus.OK: - raise exceptions.RegistryConnectionError( - "Unknown error while authentifying with registry: {}".format( - str(token_resp) - ) - ) + msg = f"Unknown error while authentifying with registry: {token_resp!s}" + raise exceptions.RegistryConnectionError(msg) bearer_code = (await token_resp.json())["token"] - headers = {"Authorization": "Bearer {}".format(bearer_code)} + headers = {"Authorization": f"Bearer {bearer_code}"} async with getattr(session, method.lower())( url, headers=headers, **kwargs ) as resp_wtoken: @@ -163,39 +158,38 @@ async def _auth_registry_request( resp_data = await resp_wbasic.json(content_type=None) resp_headers = resp_wbasic.headers return (resp_data, resp_headers) - raise exceptions.RegistryConnectionError( - f"Unknown registry authentification type: {url}" - ) + msg = f"Unknown registry authentification type: {url}" + raise exceptions.RegistryConnectionError(msg) async def registry_request( - app: web.Application, + app: FastAPI, path: str, method: str = "GET", no_cache: bool = False, **session_kwargs, -) -> Tuple[Dict, Dict]: +) -> tuple[dict, dict]: logger.debug( "Request to registry: path=%s, method=%s. 
no_cache=%s", path, method, no_cache ) - return await cache_requests(_basic_auth_registry_request, no_cache)( + return await cache_requests(_basic_auth_registry_request, no_cache=no_cache)( app, path, method, **session_kwargs ) -async def is_registry_responsive(app: web.Application) -> bool: +async def is_registry_responsive(app: FastAPI) -> bool: path = "/v2/" try: await registry_request( app, path, no_cache=True, timeout=ClientTimeout(total=1.0) ) return True - except (exceptions.DirectorException, asyncio.TimeoutError) as exc: + except (TimeoutError, exceptions.DirectorException) as exc: logger.debug("Registry not responsive: %s", exc) return False -async def setup_registry(app: web.Application) -> AsyncIterator[None]: +async def setup_registry(app: FastAPI) -> AsyncIterator[None]: logger.debug("pinging registry...") @retry( @@ -204,7 +198,7 @@ async def setup_registry(app: web.Application) -> AsyncIterator[None]: retry=retry_if_result(lambda result: result == False), reraise=True, ) - async def wait_until_registry_responsive(app: web.Application) -> bool: + async def wait_until_registry_responsive(app: FastAPI) -> bool: return await is_registry_responsive(app) await wait_until_registry_responsive(app) @@ -212,11 +206,11 @@ async def wait_until_registry_responsive(app: web.Application) -> bool: yield -async def _list_repositories(app: web.Application) -> List[str]: +async def _list_repositories(app: FastAPI) -> list[str]: logger.debug("listing repositories") # if there are more repos, the Link will be available in the response headers until none available path = f"/v2/_catalog?n={NUMBER_OF_RETRIEVED_REPOS}" - repos_list: List = [] + repos_list: list = [] while True: result, headers = await registry_request(app, path) if result["repositories"]: @@ -228,9 +222,9 @@ async def _list_repositories(app: web.Application) -> List[str]: return repos_list -async def list_image_tags(app: web.Application, image_key: str) -> List[str]: +async def list_image_tags(app: FastAPI, 
image_key: str) -> list[str]: logger.debug("listing image tags in %s", image_key) - image_tags: List = [] + image_tags: list = [] # get list of image tags path = f"/v2/{image_key}/tags/list?n={NUMBER_OF_RETRIEVED_TAGS}" while True: @@ -243,10 +237,12 @@ async def list_image_tags(app: web.Application, image_key: str) -> List[str]: logger.debug("Found %s image tags in %s", len(image_tags), image_key) return image_tags + _DOCKER_CONTENT_DIGEST_HEADER = "Docker-Content-Digest" -async def get_image_digest(app: web.Application, image: str, tag: str) -> Optional[str]: - """ Returns image manifest digest number or None if fails to obtain it + +async def get_image_digest(app: FastAPI, image: str, tag: str) -> str | None: + """Returns image manifest digest number or None if fails to obtain it The manifest digest is essentially a SHA256 hash of the image manifest @@ -259,8 +255,10 @@ async def get_image_digest(app: web.Application, image: str, tag: str) -> Option return headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) -async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[Dict, Optional[str]]: - """ Returns image labels and the image manifest digest """ +async def get_image_labels( + app: FastAPI, image: str, tag: str +) -> tuple[dict, str | None]: + """Returns image labels and the image manifest digest""" logger.debug("getting image labels of %s:%s", image, tag) path = f"/v2/{image}/manifests/{tag}" @@ -272,23 +270,21 @@ async def get_image_labels(app: web.Application, image: str, tag: str) -> Tuple[ labels = container_config["Labels"] headers = headers or {} - manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) + manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) logger.debug("retrieved labels of image %s:%s", image, tag) return (labels, manifest_digest) -async def get_image_details( - app: web.Application, image_key: str, image_tag: str -) -> Dict: - image_details: Dict = {} +async def get_image_details(app: FastAPI, 
image_key: str, image_tag: str) -> dict: + image_details: dict = {} labels, image_manifest_digest = await get_image_labels(app, image_key, image_tag) if image_manifest_digest: # Adds manifest as extra key in the response similar to org.opencontainers.image.base.digest # SEE https://github.com/opencontainers/image-spec/blob/main/annotations.md#pre-defined-annotation-keys - image_details.update({"image_digest":image_manifest_digest}) + image_details.update({"image_digest": image_manifest_digest}) if not labels: return image_details @@ -297,7 +293,7 @@ async def get_image_details( continue try: label_data = json.loads(labels[key]) - for label_key in label_data.keys(): + for label_key in label_data: image_details[label_key] = label_data[label_key] except json.decoder.JSONDecodeError: logging.exception( @@ -311,7 +307,7 @@ async def get_image_details( return image_details -async def get_repo_details(app: web.Application, image_key: str) -> List[Dict]: +async def get_repo_details(app: FastAPI, image_key: str) -> list[dict]: repo_details = [] image_tags = await list_image_tags(app, image_key) tasks = [get_image_details(app, image_key, tag) for tag in image_tags] @@ -322,7 +318,7 @@ async def get_repo_details(app: web.Application, image_key: str) -> List[Dict]: return repo_details -async def list_services(app: web.Application, service_type: ServiceType) -> List[Dict]: +async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: logger.debug("getting list of services") repos = await _list_repositories(app) # get the services repos @@ -347,8 +343,8 @@ async def list_services(app: web.Application, service_type: ServiceType) -> List async def list_interactive_service_dependencies( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> list[dict]: image_labels, _ = await get_image_labels(app, service_key, service_tag) dependency_keys = [] if DEPENDENCIES_LABEL_KEY in image_labels: 
@@ -368,7 +364,7 @@ async def list_interactive_service_dependencies( def _get_prefix(service_type: ServiceType) -> str: - return "{}/{}/".format(config.SIMCORE_SERVICES_PREFIX, service_type.value) + return f"{config.SIMCORE_SERVICES_PREFIX}/{service_type.value}/" def get_service_first_name(image_key: str) -> str: @@ -407,7 +403,7 @@ def get_service_last_names(image_key: str) -> str: RESOURCES_ENTRY_NAME = "Resources".lower() -def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): +def _validate_kind(entry_to_validate: dict[str, Any], kind_name: str): for element in ( entry_to_validate.get("value", {}) .get("Reservations", {}) @@ -419,8 +415,8 @@ def _validate_kind(entry_to_validate: Dict[str, Any], kind_name: str): async def get_service_extras( - app: web.Application, image_key: str, image_tag: str -) -> Dict[str, Any]: + app: FastAPI, image_key: str, image_tag: str +) -> dict[str, Any]: # check physical node requirements # all nodes require "CPU" result = { From b82a3697a8d01701150fc6d739b9d9f1c8497be5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 16:55:12 +0100 Subject: [PATCH 070/201] migrated to fastapi --- .../src/simcore_service_director/producer.py | 220 ++++++++---------- 1 file changed, 103 insertions(+), 117 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index b74da40c913..2414b3d13ee 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -2,15 +2,15 @@ import json import logging import re -from datetime import datetime, timedelta +from datetime import timedelta from distutils.version import StrictVersion from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Dict, List, Optional, Set, Tuple import aiodocker import aiohttp +import arrow import tenacity 
from aiohttp import ( ClientConnectionError, @@ -18,8 +18,8 @@ ClientResponse, ClientResponseError, ClientSession, - web, ) +from fastapi import FastAPI from servicelib.async_utils import run_sequentially_in_context from servicelib.monitor_services import service_started, service_stopped from tenacity import retry @@ -36,7 +36,6 @@ from .exceptions import ServiceStateSaveError from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw -from .utils import parse_as_datetime log = logging.getLogger(__name__) @@ -50,7 +49,7 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> Dict[str, str]: +async def _create_auth() -> dict[str, str]: return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} @@ -68,20 +67,20 @@ async def _check_node_uuid_available( ) except aiodocker.exceptions.DockerError as err: log.exception("Error while retrieving services list") - raise exceptions.GenericDockerError( - "Error while retrieving services", err - ) from err + msg = "Error while retrieving services" + raise exceptions.GenericDockerError(msg, err) from err if list_of_running_services_w_uuid: raise exceptions.ServiceUUIDInUseError(node_uuid) log.debug("UUID %s is free", node_uuid) -def _check_setting_correctness(setting: Dict) -> None: +def _check_setting_correctness(setting: dict) -> None: if "name" not in setting or "type" not in setting or "value" not in setting: - raise exceptions.DirectorException("Invalid setting in %s" % setting) + msg = f"Invalid setting in {setting}" + raise exceptions.DirectorException(msg) -def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: +def _parse_mount_settings(settings: list[dict]) -> list[dict]: mounts = [] for s in settings: log.debug("Retrieved mount settings %s", s) @@ -105,7 +104,7 @@ def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: return mounts -def _parse_env_settings(settings: List[str]) -> Dict: +def _parse_env_settings(settings: 
list[str]) -> dict: envs = {} for s in settings: log.debug("Retrieved env settings %s", s) @@ -120,8 +119,8 @@ def _parse_env_settings(settings: List[str]) -> Dict: async def _read_service_settings( - app: web.Application, key: str, tag: str, settings_name: str -) -> Dict: + app: FastAPI, key: str, tag: str, settings_name: str +) -> dict: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) settings = ( json.loads(image_labels[settings_name]) if settings_name in image_labels else {} @@ -140,7 +139,7 @@ def _to_simcore_runtime_docker_label_key(key: str) -> str: # pylint: disable=too-many-branches async def _create_docker_service_params( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, service_key: str, service_tag: str, @@ -149,9 +148,9 @@ async def _create_docker_service_params( node_uuid: str, project_id: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, request_simcore_user_agent: str, -) -> Dict: +) -> dict: # pylint: disable=too-many-statements service_parameters_labels = await _read_service_settings( app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS @@ -222,9 +221,11 @@ async def _create_docker_service_params( "task_template": { "ContainerSpec": container_spec, "Placement": { - "Constraints": ["node.role==worker"] - if await docker_utils.swarm_has_worker_nodes() - else [] + "Constraints": ( + ["node.role==worker"] + if await docker_utils.swarm_has_worker_nodes() + else [] + ) }, "RestartPolicy": { "Condition": "on-failure", @@ -258,9 +259,9 @@ async def _create_docker_service_params( ): "osparc", # fixed no legacy available in other products _to_simcore_runtime_docker_label_key("cpu_limit"): "0", _to_simcore_runtime_docker_label_key("memory_limit"): "0", - _to_simcore_runtime_docker_label_key("type"): "main" - if main_service - else "dependency", + _to_simcore_runtime_docker_label_key("type"): ( + "main" if main_service else "dependency" + ), 
"io.simcore.zone": f"{config.TRAEFIK_SIMCORE_ZONE}", "traefik.enable": "true" if main_service else "false", f"traefik.http.services.{service_name}.loadbalancer.server.port": "8080", @@ -281,10 +282,7 @@ async def _create_docker_service_params( if reverse_proxy_settings: # some services define strip_path:true if they need the path to be stripped away - if ( - "strip_path" in reverse_proxy_settings - and reverse_proxy_settings["strip_path"] - ): + if reverse_proxy_settings.get("strip_path"): docker_params["labels"][ f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" ] = f"^/x/{node_uuid}" @@ -292,8 +290,8 @@ async def _create_docker_service_params( f"traefik.http.routers.{service_name}.middlewares" ] += f", {service_name}_stripprefixregex" - placement_constraints_to_substitute: List[str] = [] - placement_substitutions: Dict[ + placement_constraints_to_substitute: list[str] = [] + placement_substitutions: dict[ str, str ] = config.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS @@ -336,7 +334,7 @@ async def _create_docker_service_params( "GenericResources" ] - to_remove: Set[str] = set() + to_remove: set[str] = set() for generic_resource in generic_resources: kind = generic_resource["DiscreteResourceSpec"]["Kind"] if kind in placement_substitutions: @@ -389,9 +387,9 @@ async def _create_docker_service_params( ) # placement constraints - elif param["name"] == "constraints": # python-API compatible - docker_params["task_template"]["Placement"]["Constraints"] += param["value"] - elif param["type"] == "Constraints": # REST-API compatible + elif ( + param["name"] == "constraints" or param["type"] == "Constraints" + ): # python-API compatible docker_params["task_template"]["Placement"]["Constraints"] += param["value"] elif param["name"] == "env": log.debug("Found env parameter %s", param["value"]) @@ -402,7 +400,7 @@ async def _create_docker_service_params( ) elif param["name"] == "mount": log.debug("Found mount parameter %s", 
param["value"]) - mount_settings: List[Dict] = _parse_mount_settings(param["value"]) + mount_settings: list[dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( mount_settings @@ -456,7 +454,7 @@ async def _create_docker_service_params( return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: +def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: log.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) @@ -466,10 +464,10 @@ def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: +async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: network_name = "_default" if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = "{}".format(config.SIMCORE_SERVICES_NETWORK_NAME) + network_name = f"{config.SIMCORE_SERVICES_NETWORK_NAME}" # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -488,8 +486,8 @@ async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: async def _get_docker_image_port_mapping( - service: Dict, -) -> Tuple[Optional[str], Optional[int]]: + service: dict, +) -> tuple[str | None, int | None]: log.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] @@ -509,12 +507,11 @@ async def _get_docker_image_port_mapping( published_port = published_ports[0] if target_ports: target_port = target_ports[0] - else: - # if empty no port is published but there might still be an internal port defined - if _to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: - target_port = int( - service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] - ) + # if empty no port is published but there might still be an internal port defined + elif 
_to_simcore_runtime_docker_label_key("port") in service["Spec"]["Labels"]: + target_port = int( + service["Spec"]["Labels"][_to_simcore_runtime_docker_label_key("port")] + ) return published_port, target_port @@ -525,7 +522,7 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: Dict, + service_boot_parameters_labels: dict, session: ClientSession, ) -> None: for param in service_boot_parameters_labels: @@ -578,9 +575,8 @@ async def _create_overlay_network_in_swarm( return docker_network.id except aiodocker.exceptions.DockerError as err: log.exception("Error while creating network for service %s", service_name) - raise exceptions.GenericDockerError( - "Error while creating network", err - ) from err + msg = "Error while creating network" + raise exceptions.GenericDockerError(msg, err) from err async def _remove_overlay_network_of_swarm( @@ -607,22 +603,21 @@ async def _remove_overlay_network_of_swarm( log.exception( "Error while removing networks for service with uuid: %s", node_uuid ) - raise exceptions.GenericDockerError( - "Error while removing networks", err - ) from err + msg = "Error while removing networks" + raise exceptions.GenericDockerError(msg, err) from err async def _get_service_state( - client: aiodocker.docker.Docker, service: Dict -) -> Tuple[ServiceState, str]: + client: aiodocker.docker.Docker, service: dict +) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] log.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) # wait for tasks - task_started_time = datetime.utcnow() - while (datetime.utcnow() - task_started_time) < timedelta(seconds=20): + task_started_time = arrow.utcnow().datetime + while (arrow.utcnow().datetime - task_started_time) < timedelta(seconds=20): tasks = await client.tasks.list(filters={"service": 
service_name}) # only keep the ones with the right service ID (we're being a bit picky maybe) tasks = [x for x in tasks if x["ServiceID"] == service["ID"]] @@ -666,11 +661,10 @@ async def _get_service_state( elif task_state in ("ready", "starting"): last_task_state = ServiceState.STARTING elif task_state in ("running"): - now = datetime.utcnow() + now = arrow.utcnow().datetime # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' - task_state_update_time = parse_as_datetime( - last_task["Status"]["Timestamp"], default=now - ) + last_task["Status"]["Timestamp"] + task_state_update_time = arrow.get(last_task["Status"]["Timestamp"]).datetime time_since_running = now - task_state_update_time log.debug("Now is %s, time since running mode is %s", now, time_since_running) @@ -688,7 +682,7 @@ async def _get_service_state( async def _wait_until_service_running_or_failed( - client: aiodocker.docker.Docker, service: Dict, node_uuid: str + client: aiodocker.docker.Docker, service: dict, node_uuid: str ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] @@ -714,9 +708,7 @@ async def _wait_until_service_running_or_failed( log.debug("Waited for service %s to start", service_name) -async def _get_repos_from_key( - app: web.Application, service_key: str -) -> Dict[str, List[Dict]]: +async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[dict]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) @@ -735,15 +727,14 @@ async def _get_repos_from_key( async def _get_dependant_repos( - app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: + app: FastAPI, service_key: str, service_tag: str +) -> list[dict]: list_of_images = await _get_repos_from_key(app, service_key) tag = await _find_service_tag(list_of_images, service_key, service_tag) # look 
for dependencies - dependent_repositories = await registry_proxy.list_interactive_service_dependencies( + return await registry_proxy.list_interactive_service_dependencies( app, service_key, tag ) - return dependent_repositories _TAG_REGEX = re.compile(r"^\d+\.\d+\.\d+$") @@ -758,9 +749,9 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: Dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str ) -> str: - if not service_key in list_of_images: + if service_key not in list_of_images: raise exceptions.ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) @@ -785,7 +776,7 @@ async def _find_service_tag( async def _start_docker_service( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, user_id: str, project_id: str, @@ -794,9 +785,9 @@ async def _start_docker_service( main_service: bool, node_uuid: str, node_base_path: str, - internal_network_id: Optional[str], + internal_network_id: str | None, request_simcore_user_agent: str, -) -> Dict: # pylint: disable=R0913 +) -> dict: # pylint: disable=R0913 service_parameters = await _create_docker_service_params( app, client, @@ -821,9 +812,8 @@ async def _start_docker_service( service = await client.services.create(**service_parameters) if "ID" not in service: # error while starting service - raise exceptions.DirectorException( - "Error while starting service: {}".format(str(service)) - ) + msg = f"Error while starting service: {service!s}" + raise exceptions.DirectorException(msg) log.debug("Service started now waiting for it to run") # get the full info from docker @@ -848,7 +838,7 @@ async def _start_docker_service( service_name, published_port, service_boot_parameters_labels, session ) - container_meta_data = { + return { "published_port": published_port, "entry_point": service_entrypoint, "service_uuid": node_uuid, @@ -862,7 +852,6 @@ async def _start_docker_service( "user_id": user_id, "project_id": 
project_id, } - return container_meta_data except exceptions.ServiceStartTimeoutError: log.exception("Service failed to start") @@ -874,7 +863,7 @@ async def _start_docker_service( raise exceptions.ServiceNotAvailableError(service_key, service_tag) from err -async def _silent_service_cleanup(app: web.Application, node_uuid: str) -> None: +async def _silent_service_cleanup(app: FastAPI, node_uuid: str) -> None: try: await stop_service(app, node_uuid, False) except exceptions.DirectorException: @@ -882,15 +871,15 @@ async def _silent_service_cleanup(app: web.Application, node_uuid: str) -> None: async def _create_node( - app: web.Application, + app: FastAPI, client: aiodocker.docker.Docker, user_id: str, project_id: str, - list_of_services: List[Dict], + list_of_services: list[dict], node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> List[Dict]: # pylint: disable=R0913, R0915 +) -> list[dict]: # pylint: disable=R0913, R0915 log.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), @@ -930,8 +919,8 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: Dict, -) -> Tuple[str, str]: + service: dict, +) -> tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) if not service_full_name.startswith(config.REGISTRY_PATH): raise exceptions.DirectorException( @@ -949,14 +938,14 @@ async def _get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: Dict) -> str: +async def _get_service_basepath_from_docker_service(service: dict) -> str: envs_list = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict = dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] async def start_service( - app: web.Application, + app: FastAPI, user_id: str, project_id: str, service_key: str, @@ -964,7 +953,7 @@ async def 
start_service( node_uuid: str, node_base_path: str, request_simcore_user_agent: str, -) -> Dict: +) -> dict: # pylint: disable=C0103 log.debug( "starting service %s:%s using uuid %s, basepath %s", @@ -1010,8 +999,8 @@ async def start_service( async def _get_node_details( - app: web.Application, client: aiodocker.docker.Docker, service: Dict -) -> Dict: + app: FastAPI, client: aiodocker.docker.Docker, service: dict +) -> dict: service_key, service_tag = await _get_service_key_version_from_docker_service( service ) @@ -1040,7 +1029,7 @@ async def _get_node_details( # get the published port published_port, target_port = await _get_docker_image_port_mapping(service) - node_details = { + return { "published_port": published_port, "entry_point": service_entrypoint, "service_uuid": service_uuid, @@ -1054,12 +1043,11 @@ async def _get_node_details( "user_id": user_id, "project_id": project_id, } - return node_details async def get_services_details( - app: web.Application, user_id: Optional[str], study_id: Optional[str] -) -> List[Dict]: + app: FastAPI, user_id: str | None, study_id: str | None +) -> list[dict]: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = [ @@ -1078,23 +1066,21 @@ async def get_services_details( filters={"label": filters} ) - services_details = [ + return [ await _get_node_details(app, client, service) for service in list_running_services ] - return services_details except aiodocker.exceptions.DockerError as err: log.exception( "Error while listing services with user_id, study_id %s, %s", user_id, study_id, ) - raise exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + msg = "Error while accessing container" + raise exceptions.GenericDockerError(msg, err) from err -async def get_service_details(app: web.Application, node_uuid: str) -> Dict: +async def get_service_details(app: FastAPI, node_uuid: str) -> dict: async with docker_utils.docker_client() as client: 
# pylint: disable=not-async-context-manager try: list_running_services_with_uuid = await client.services.list( @@ -1116,15 +1102,13 @@ async def get_service_details(app: web.Application, node_uuid: str) -> Dict: msg="More than one docker service is labeled as main service" ) - node_details = await _get_node_details( + return await _get_node_details( app, client, list_running_services_with_uuid[0] ) - return node_details except aiodocker.exceptions.DockerError as err: log.exception("Error while accessing container with uuid: %s", node_uuid) - raise exceptions.GenericDockerError( - "Error while accessing container", err - ) from err + msg = "Error while accessing container" + raise exceptions.GenericDockerError(msg, err) from err @retry( @@ -1171,7 +1155,7 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes @run_sequentially_in_context(target_args=["node_uuid"]) -async def stop_service(app: web.Application, node_uuid: str, save_state: bool) -> None: +async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: log.debug( "stopping service with node_uuid=%s, save_state=%s", node_uuid, save_state ) @@ -1189,9 +1173,8 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - ) except aiodocker.exceptions.DockerError as err: log.exception("Error while stopping container with uuid: %s", node_uuid) - raise exceptions.GenericDockerError( - "Error while stopping container", err - ) from err + msg = "Error while stopping container" + raise exceptions.GenericDockerError(msg, err) from err # error if no service with such an id exists if not list_running_services_with_uuid: @@ -1204,12 +1187,16 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - # FIXME: the exception for the 3d-viewer shall be removed once the dy-sidecar comes in service_host_name = "{}:{}{}".format( service_details["service_host"], - service_details["service_port"] - if service_details["service_port"] - 
else "80", - service_details["service_basepath"] - if not "3d-viewer" in service_details["service_host"] - else "", + ( + service_details["service_port"] + if service_details["service_port"] + else "80" + ), + ( + service_details["service_basepath"] + if "3d-viewer" not in service_details["service_host"] + else "" + ), ) # If state save is enforced @@ -1243,9 +1230,8 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - await client.services.delete(service["Spec"]["Name"]) except aiodocker.exceptions.DockerError as err: - raise exceptions.GenericDockerError( - "Error while removing services", err - ) from err + msg = "Error while removing services" + raise exceptions.GenericDockerError(msg, err) from err # remove network(s) log.debug("removed services, now removing network...") From d35c5941184a9c786c5eeef01cdf9e011bf7dff6 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:08:20 +0100 Subject: [PATCH 071/201] app fixture --- services/director/requirements/_test.in | 1 + services/director/requirements/_test.txt | 3 +++ services/director/tests/conftest.py | 22 ++++++++++++++++++++++ 3 files changed, 26 insertions(+) diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index eafeb199342..004f8396e81 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -9,6 +9,7 @@ --constraint _base.txt # testing +asgi_lifespan docker faker jsonref diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 656c294334f..1ccd0e2a907 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -2,6 +2,8 @@ anyio==4.6.2.post1 # via # -c requirements/_base.txt # httpx +asgi-lifespan==2.1.0 + # via -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt @@ -95,6 +97,7 @@ sniffio==1.3.1 # via # -c 
requirements/_base.txt # anyio + # asgi-lifespan # httpx termcolor==2.5.0 # via pytest-sugar diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 458422c658a..925c6c26d03 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -5,12 +5,17 @@ import os from pathlib import Path +from typing import AsyncIterator import pytest import simcore_service_director +from asgi_lifespan import LifespanManager +from fastapi import FastAPI from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director import config, resources +from simcore_service_director.core.application import create_app +from simcore_service_director.core.settings import ApplicationSettings pytest_plugins = [ "fixtures.fake_services", @@ -109,6 +114,23 @@ def app_environment( ) +MAX_TIME_FOR_APP_TO_STARTUP = 10 +MAX_TIME_FOR_APP_TO_SHUTDOWN = 10 + + +@pytest.fixture +async def app( + app_environment: EnvVarsDict, is_pdb_enabled: bool +) -> AsyncIterator[FastAPI]: + the_test_app = create_app(settings=ApplicationSettings.create_from_envs()) + async with LifespanManager( + the_test_app, + startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, + shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, + ): + yield the_test_app + + # @pytest.fixture # async def aiohttp_mock_app(loop, mocker): # print("client session started ...") From 859fef5b2ce248bb5a4724faeb8f9b542233c6dd Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 15:16:47 +0100 Subject: [PATCH 072/201] refactoring test_handlers --- services/director/tests/api/conftest.py | 34 ++ .../test_rest_running_interactive_services.py | 285 +++++++++++++ .../director/tests/api/test_rest_services.py | 256 +++++++++++ .../director/tests/fixtures/fake_services.py | 342 +++++++++------ 
services/director/tests/test_handlers.py | 400 ++++++++++-------- 5 files changed, 1011 insertions(+), 306 deletions(-) create mode 100644 services/director/tests/api/conftest.py create mode 100644 services/director/tests/api/test_rest_running_interactive_services.py create mode 100644 services/director/tests/api/test_rest_services.py diff --git a/services/director/tests/api/conftest.py b/services/director/tests/api/conftest.py new file mode 100644 index 00000000000..8cc186e8465 --- /dev/null +++ b/services/director/tests/api/conftest.py @@ -0,0 +1,34 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from collections.abc import AsyncIterator + +import httpx +import pytest +from fastapi import FastAPI +from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict +from httpx._transports.asgi import ASGITransport + + +@pytest.fixture +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + app=app, + base_url="http://director.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance(client._transport, ASGITransport) + yield client + + +@pytest.fixture +async def created_services( + push_services: PushServicesCallable, +) -> list[ServiceInRegistryInfoDict]: + return await push_services( + number_of_computational_services=3, number_of_interactive_services=2 + ) diff --git a/services/director/tests/api/test_rest_running_interactive_services.py b/services/director/tests/api/test_rest_running_interactive_services.py new file mode 100644 index 00000000000..98c13317871 --- /dev/null +++ b/services/director/tests/api/test_rest_running_interactive_services.py @@ -0,0 +1,285 @@ +# pylint: disable=redefined-outer-name +# pylint: 
disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import uuid + +import httpx +import pytest +from aioresponses.core import CallbackResult, aioresponses +from fastapi import status + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +@pytest.mark.skip( + reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" +) +async def test_running_services_post_and_delete_no_swarm( + configure_swarm_stack_name, + client: httpx.AsyncClient, + push_services, + user_id, + project_id, + api_version_prefix, +): + params = { + "user_id": "None", + "project_id": "None", + "service_uuid": "sdlfkj4", + "service_key": "simcore/services/comp/some-key", + } + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == 500, data + + +@pytest.mark.parametrize( + "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] +) +async def test_running_services_post_and_delete( + configure_swarm_stack_name, + client: httpx.AsyncClient, + push_services, + docker_swarm, + user_id, + project_id, + api_version_prefix, + save_state: bool | None, + expected_save_state_call: bool, + mocker, +): + params = {} + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + params = { + "user_id": "None", + "project_id": "None", + "service_uuid": "sdlfkj4", + "service_key": "None", + "service_tag": "None", # optional + "service_basepath": "None", # optional + } + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = 
resp.json() + assert resp.status_code == status.HTTP_400_BAD_REQUEST, data + + params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" + params["service_tag"] = "1.2.3" + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + data = resp.json() + assert resp.status_code == status.HTTP_404_NOT_FOUND, data + + created_services = await push_services(0, 2) + assert len(created_services) == 2 + for created_service in created_services: + service_description = created_service["service_description"] + params["user_id"] = user_id + params["project_id"] = project_id + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + service_port = created_service["internal_port"] + service_entry_point = created_service["entry_point"] + params["service_basepath"] = "/i/am/a/basepath" + params["service_uuid"] = str(uuid.uuid4()) + # start the service + resp = await client.post( + f"/{api_version_prefix}/running_interactive_services", params=params + ) + assert resp.status_code == status.HTTP_201_CREATED + assert resp.encoding == "application/json" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + "service_host", + "service_port", + "service_basepath", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert running_service_enveloped["data"]["service_port"] == service_port + service_published_port = running_service_enveloped["data"]["published_port"] + assert not service_published_port + assert service_entry_point == 
running_service_enveloped["data"]["entry_point"] + service_host = running_service_enveloped["data"]["service_host"] + assert service_host == f"test_{params['service_uuid']}" + service_basepath = running_service_enveloped["data"]["service_basepath"] + assert service_basepath == params["service_basepath"] + + # get the service + resp = await client.request( + "GET", + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + ) + assert resp.status_code == status.HTTP_200_OK + text = resp.text + assert resp.encoding == "application/json", f"Got {text=}" + running_service_enveloped = resp.json() + assert isinstance(running_service_enveloped["data"], dict) + assert all( + k in running_service_enveloped["data"] + for k in [ + "service_uuid", + "service_key", + "service_version", + "published_port", + "entry_point", + ] + ) + assert ( + running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] + ) + assert running_service_enveloped["data"]["service_key"] == params["service_key"] + assert ( + running_service_enveloped["data"]["service_version"] + == params["service_tag"] + ) + assert ( + running_service_enveloped["data"]["published_port"] + == service_published_port + ) + assert running_service_enveloped["data"]["entry_point"] == service_entry_point + assert running_service_enveloped["data"]["service_host"] == service_host + assert running_service_enveloped["data"]["service_port"] == service_port + assert running_service_enveloped["data"]["service_basepath"] == service_basepath + + # stop the service + query_params = {} + if save_state: + query_params.update({"save_state": "true" if save_state else "false"}) + + mocked_save_state_cb = mocker.MagicMock( + return_value=CallbackResult(status=200, payload={}) + ) + PASSTHROUGH_REQUESTS_PREFIXES = [ + "http://127.0.0.1", + "http://localhost", + "unix://", # docker engine + "ws://", # websockets + ] + with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + + # POST 
/http://service_host:service_port service_basepath/state ------------------------------------------------- + mock.post( + f"http://{service_host}:{service_port}{service_basepath}/state", + status=200, + callback=mocked_save_state_cb, + ) + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + params=query_params, + ) + if expected_save_state_call: + mocked_save_state_cb.assert_called_once() + + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.encoding == "application/json" + data = resp.json() + assert data is None + + +async def test_running_interactive_services_list_get( + client: httpx.AsyncClient, push_services, docker_swarm +): + """Test case for running_interactive_services_list_get + + Returns a list of interactive services + """ + user_ids = ["first_user_id", "second_user_id"] + project_ids = ["first_project_id", "second_project_id", "third_project_id"] + # prepare services + NUM_SERVICES = 1 + created_services = await push_services(0, NUM_SERVICES) + assert len(created_services) == NUM_SERVICES + # start the services + for user_id in user_ids: + for project_id in project_ids: + for created_service in created_services: + service_description = created_service["service_description"] + params = {} + params["user_id"] = user_id + params["project_id"] = project_id + params["service_key"] = service_description["key"] + params["service_tag"] = service_description["version"] + params["service_uuid"] = str(uuid.uuid4()) + # start the service + resp = await client.post( + "/v0/running_interactive_services", params=params + ) + assert resp.status_code == 201 + # get the list of services + for user_id in user_ids: + for project_id in project_ids: + params = {} + # list by user_id + params["user_id"] = user_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response 
body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == len(project_ids) * NUM_SERVICES + # list by user_id and project_id + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == NUM_SERVICES + # list by project_id + params = {} + params["project_id"] = project_id + response = await client.get( + "/v0/running_interactive_services", params=params + ) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) + assert data + assert not error + services_list = data + assert len(services_list) == len(user_ids) * NUM_SERVICES diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py new file mode 100644 index 00000000000..6d5941a6044 --- /dev/null +++ b/services/director/tests/api/test_rest_services.py @@ -0,0 +1,256 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import json +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict +from helpers import json_schema_validator +from simcore_service_director import resources + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +async 
def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") + + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + + data, error = _assert_response_and_unwrap_envelope(resp) + assert data + assert not error + + assert data["name"] == "simcore-service-director" + assert data["status"] == "SERVICE_RUNNING" + assert data["version"] == "0.1.0" + assert data["api_version"] == "0.1.0" + + +def _assert_services( + *, + expected: list[ServiceInRegistryInfoDict], + got: list[dict], + schema_version="v1", +): + assert len(expected) == len(got) + + expected_key_version_tuples = [ + (s["service_description"]["key"], s["service_description"]["version"]) + for s in expected + ] + + json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) + assert json_schema_path.exists() is True + with json_schema_path.open() as file_pt: + service_schema = json.load(file_pt) + + for service in got: + service.pop("image_digest", None) + if schema_version == "v1": + assert ( + expected_key_version_tuples.count((service["key"], service["version"])) + == 1 + ) + json_schema_validator.validate_instance_object(service, service_schema) + + +async def test_list_services_with_empty_registry( + docker_registry: str, + client: httpx.AsyncClient, + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + + # empty case + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=[], got=services) + + +async def test_list_services( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=created_services, got=services) + + +async def test_get_service_bad_request( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + assert len(created_services) > 0 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not services + assert error + + +async def test_list_services_by_service_type( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ assert len(created_services) == 5 + + resp = await client.get( + f"/{api_version_prefix}/services?service_type=computational" + ) + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services + assert len(services) == 3 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services + assert len(services) == 2 + + +async def test_get_services_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + resp = await client.get( + f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + resp = await client.get( + f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + retrieved_services: list[dict] = [] + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! 
+ key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + assert len(services) == 1 + + retrieved_services.append(services[0]) + + _assert_services(expected=created_services, got=retrieved_services) + + +async def test_get_service_labels( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for service in created_services: + service_description = service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/services/{key}/{version}/labels" + resp = await client.get(url) + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + + labels, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + + assert service["docker_labels"] == labels + + +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( + f"/{api_version_prefix}/service_extras/whatever/someversion" + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + 
created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/service_extras/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + + service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert created_service["service_extras"] == service_extras diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py index f2954c3c469..76785c039d0 100644 --- a/services/director/tests/fixtures/fake_services.py +++ b/services/director/tests/fixtures/fake_services.py @@ -1,13 +1,18 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import asyncio import json import logging import random +import sys +from collections.abc import Awaitable, Iterator from io import BytesIO from pathlib import Path +from typing import Any, Literal, Protocol, TypedDict import pytest import requests @@ -19,105 +24,132 @@ _logger = logging.getLogger(__name__) -@pytest.fixture() -def push_services(docker_registry, tmpdir): - registry_url = docker_registry - tmp_dir = Path(tmpdir) +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent - list_of_pushed_images_tags = [] - dependent_images = [] - async def build_push_images( - number_of_computational_services, - number_of_interactive_services, - inter_dependent_services=False, - bad_json_format=False, - version="1.0.", - ): - try: - 
dependent_image = None - if inter_dependent_services: - dependent_image = await _build_push_image( - tmp_dir, - registry_url, - "computational", - "dependency", - "10.52.999999", - None, - bad_json_format=bad_json_format, - ) - dependent_images.append(dependent_image) +class NodeRequirementsDict(TypedDict): + CPU: float + RAM: float - images_to_build = [] - - for image_index in range(number_of_computational_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "computational", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - for image_index in range(number_of_interactive_services): - images_to_build.append( - _build_push_image( - tmp_dir, - registry_url, - "dynamic", - "test", - version + str(image_index), - dependent_image, - bad_json_format=bad_json_format, - ) - ) - results = await asyncio.gather(*images_to_build) - list_of_pushed_images_tags.extend(results) - except DockerError: - _logger.exception("Unexpected docker API error") - raise +class ServiceExtrasDict(TypedDict): + node_requirements: NodeRequirementsDict + build_date: str + vcs_ref: str + vcs_url: str - return list_of_pushed_images_tags - yield build_push_images +class ServiceDescriptionDict(TypedDict): + key: str + version: str + type: Literal["computational", "dynamic"] - _logger.info("clean registry") - _clean_registry(registry_url, list_of_pushed_images_tags) - _clean_registry(registry_url, dependent_images) + +class ServiceInRegistryInfoDict(TypedDict): + service_description: ServiceDescriptionDict + docker_labels: dict[str, Any] + image_path: str + internal_port: int | None + entry_point: str + service_extras: ServiceExtrasDict + + +def _create_service_description( + service_type: Literal["computational", "dynamic"], name: str, tag: str +) -> ServiceDescriptionDict: + service_desc = json.loads( + (CURRENT_DIR / "dummy_service_description-v1.json").read_text() + ) + + if service_type == "computational": + 
service_key_type = "comp" + elif service_type == "dynamic": + service_key_type = "dynamic" + else: + msg = f"Invalid {service_type=}" + raise ValueError(msg) + + service_desc["key"] = f"simcore/services/{service_key_type}/{name}" + service_desc["version"] = tag + service_desc["type"] = service_type + + return service_desc + + +def _create_docker_labels( + service_description: ServiceDescriptionDict, *, bad_json_format: bool +) -> dict[str, str]: + docker_labels = {} + for key, value in service_description.items(): + docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) + if bad_json_format: + docker_labels[".".join(["io", "simcore", key])] = ( + "d32;'" + docker_labels[".".join(["io", "simcore", key])] + ) + + return docker_labels + + +async def _create_base_image(labels, tag) -> dict[str, Any]: + dockerfile = """ +FROM alpine +CMD while true; do sleep 10; done + """ + f = BytesIO(dockerfile.encode("utf-8")) + tar_obj = utils.mktar_from_dockerfile(f) + + # build docker base image + docker = Docker() + base_docker_image = await docker.images.build( + fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag + ) + await docker.close() + return base_docker_image -async def _build_push_image( - docker_dir, - registry_url, - service_type, - name, - tag, +async def _build_and_push_image( + registry_url: str, + service_type: Literal["computational", "dynamic"], + name: str, + tag: str, dependent_image=None, *, - bad_json_format=False, -): # pylint: disable=R0913 + bad_json_format: bool = False, +) -> ServiceInRegistryInfoDict: # pylint: disable=R0913 # crate image service_description = _create_service_description(service_type, name, tag) - docker_labels = _create_docker_labels(service_description, bad_json_format) + docker_labels = _create_docker_labels( + service_description, bad_json_format=bad_json_format + ) additional_docker_labels = [ - {"name": "constraints", "type": "string", "value": ["node.role==manager"]} + { + "name": 
"constraints", + "type": "string", + "value": ["node.role==manager"], + } ] internal_port = None entry_point = "" if service_type == "dynamic": - internal_port = random.randint(1, 65535) + internal_port = random.randint(1, 65535) # noqa: S311 additional_docker_labels.append( - {"name": "ports", "type": "int", "value": internal_port} + { + "name": "ports", + "type": "int", + "value": internal_port, + } ) entry_point = "/test/entry_point" docker_labels["simcore.service.bootsettings"] = json.dumps( - [{"name": "entry_point", "type": "string", "value": entry_point}] + [ + { + "name": "entry_point", + "type": "string", + "value": entry_point, + } + ] ) docker_labels["simcore.service.settings"] = json.dumps(additional_docker_labels) if bad_json_format: @@ -142,15 +174,15 @@ async def _build_push_image( ) # create the typical org.label-schema labels - service_extras = { - "node_requirements": { - "CPU": DEFAULT_MAX_NANO_CPUS / 1e9, - "RAM": DEFAULT_MAX_MEMORY, - }, - "build_date": "2020-08-19T15:36:27Z", - "vcs_ref": "ca180ef1", - "vcs_url": "git@github.com:ITISFoundation/osparc-simcore.git", - } + service_extras = ServiceExtrasDict( + node_requirements=NodeRequirementsDict( + CPU=DEFAULT_MAX_NANO_CPUS / 1e9, + RAM=DEFAULT_MAX_MEMORY, + ), + build_date="2020-08-19T15:36:27Z", + vcs_ref="ca180ef1", + vcs_url="git@github.com:ITISFoundation/osparc-simcore.git", + ) docker_labels["org.label-schema.build-date"] = service_extras["build_date"] docker_labels["org.label-schema.schema-version"] = "1.0" docker_labels["org.label-schema.vcs-ref"] = service_extras["vcs_ref"] @@ -162,22 +194,26 @@ async def _build_push_image( await _create_base_image(docker_labels, image_tag) # push image to registry - docker = Docker() - await docker.images.push(image_tag) - await docker.close() + try: + docker = Docker() + await docker.images.push(image_tag) + finally: + await docker.close() + # remove image from host # docker.images.remove(image_tag) - return { - "service_description": 
service_description, - "docker_labels": docker_labels, - "image_path": image_tag, - "internal_port": internal_port, - "entry_point": entry_point, - "service_extras": service_extras, - } + + return ServiceInRegistryInfoDict( + service_description=service_description, + docker_labels=docker_labels, + image_path=image_tag, + internal_port=internal_port, + entry_point=entry_point, + service_extras=service_extras, + ) -def _clean_registry(registry_url, list_of_images): +def _clean_registry(registry_url: str, list_of_images: list[ServiceInRegistryInfoDict]): request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} for image in list_of_images: service_description = image["service_description"] @@ -197,51 +233,83 @@ def _clean_registry(registry_url, list_of_images): response = requests.delete(url, headers=request_headers, timeout=5) -async def _create_base_image(labels, tag): - dockerfile = """ -FROM alpine -CMD while true; do sleep 10; done - """ - f = BytesIO(dockerfile.encode("utf-8")) - tar_obj = utils.mktar_from_dockerfile(f) +class PushServicesCallable(Protocol): + async def __call__( + self, + *, + number_of_computational_services: int, + number_of_interactive_services: int, + inter_dependent_services: bool = False, + bad_json_format: bool = False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + ... 
- # build docker base image - docker = Docker() - base_docker_image = await docker.images.build( - fileobj=tar_obj, encoding="gzip", rm=True, labels=labels, tag=tag - ) - await docker.close() - return base_docker_image[0] +@pytest.fixture +def push_services(docker_registry: str) -> Iterator[PushServicesCallable]: + registry_url = docker_registry + list_of_pushed_images_tags: list[ServiceInRegistryInfoDict] = [] + dependent_images = [] -def _create_service_description(service_type, name, tag): - file_name = "dummy_service_description-v1.json" - dummy_description_path = Path(__file__).parent / file_name - with dummy_description_path.open() as file_pt: - service_desc = json.load(file_pt) + async def _build_push_images_to_docker_registry( + *, + number_of_computational_services, + number_of_interactive_services, + inter_dependent_services=False, + bad_json_format=False, + version="1.0.", + ) -> list[ServiceInRegistryInfoDict]: + try: + dependent_image = None + if inter_dependent_services: + dependent_image = await _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="dependency", + tag="10.52.999999", + dependent_image=None, + bad_json_format=bad_json_format, + ) + dependent_images.append(dependent_image) - if service_type == "computational": - service_key_type = "comp" - elif service_type == "dynamic": - service_key_type = "dynamic" - else: - msg = f"Invalid {service_type=}" - raise ValueError(msg) + images_to_build: list[Awaitable] = [ + _build_and_push_image( + registry_url=registry_url, + service_type="computational", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, + ) + for image_index in range(number_of_computational_services) + ] + + images_to_build.extend( + [ + _build_and_push_image( + registry_url=registry_url, + service_type="dynamic", + name="test", + tag=f"{version}{image_index}", + dependent_image=dependent_image, + bad_json_format=bad_json_format, 
+ ) + for image_index in range(number_of_interactive_services) + ] + ) - service_desc["key"] = f"simcore/services/{service_key_type}/{name}" - service_desc["version"] = tag - service_desc["type"] = service_type + results = await asyncio.gather(*images_to_build) + list_of_pushed_images_tags.extend(results) - return service_desc + except DockerError: + _logger.exception("Docker API error while building and pushing images") + raise + return list_of_pushed_images_tags -def _create_docker_labels(service_description, bad_json_format): - docker_labels = {} - for key, value in service_description.items(): - docker_labels[".".join(["io", "simcore", key])] = json.dumps({key: value}) - if bad_json_format: - docker_labels[".".join(["io", "simcore", key])] = ( - "d32;'" + docker_labels[".".join(["io", "simcore", key])] - ) + yield _build_push_images_to_docker_registry - return docker_labels + _logger.info("clean registry") + _clean_registry(registry_url, list_of_pushed_images_tags) + _clean_registry(registry_url, dependent_images) diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 3a326ef6ba2..3bac70c63f7 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -1,58 +1,73 @@ -# pylint: disable=unused-argument -# pylint: disable=unused-import -# pylint: disable=bare-except # pylint: disable=redefined-outer-name -# pylint: disable=R0915 +# pylint: disable=unused-argument +# pylint: disable=unused-variable # pylint: disable=too-many-arguments import json +import time import uuid +from collections.abc import AsyncIterator from urllib.parse import quote +import httpx import pytest from aioresponses.core import CallbackResult, aioresponses +from fastapi import FastAPI, status +from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict from helpers import json_schema_validator -from servicelib.rest_responses import ( # pylint: disable=no-name-in-module - 
unwrap_envelope, -) -from simcore_service_director import main, resources, rest +from httpx._transports.asgi import ASGITransport +from simcore_service_director import resources, rest @pytest.fixture -def client( - loop, - aiohttp_client, - aiohttp_unused_port, - configure_schemas_location, - configure_registry_access, -): - app = main.setup_app() - server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) +async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: + # - Needed for app to trigger start/stop event handlers + # - Prefer this client instead of fastapi.testclient.TestClient + async with httpx.AsyncClient( + app=app, + base_url="http://director.testserver.io", + headers={"Content-Type": "application/json"}, + ) as client: + assert isinstance(client._transport, ASGITransport) + yield client -async def test_root_get(client, api_version_prefix): - web_response = await client.get(f"/{api_version_prefix}/") - assert web_response.content_type == "application/json" - assert web_response.status == 200 - healthcheck_enveloped = await web_response.json() - assert "data" in healthcheck_enveloped +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" - assert isinstance(healthcheck_enveloped["data"], dict) + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") - healthcheck = healthcheck_enveloped["data"] - assert healthcheck["name"] == "simcore-service-director" - assert healthcheck["status"] == "SERVICE_RUNNING" - assert healthcheck["version"] == "0.1.0" - assert healthcheck["api_version"] == "0.1.0" +async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") -def _check_services(created_services, services, schema_version="v1"): - assert 
len(created_services) == len(services) + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + + data, error = _assert_response_and_unwrap_envelope(resp) + assert data + assert not error + + assert data["name"] == "simcore-service-director" + assert data["status"] == "SERVICE_RUNNING" + assert data["version"] == "0.1.0" + assert data["api_version"] == "0.1.0" + + +def _assert_services( + *, + expected: list[ServiceInRegistryInfoDict], + got: list[ServiceInRegistryInfoDict], + schema_version="v1", +): + assert len(expected) == len(got) created_service_descriptions = [ (x["service_description"]["key"], x["service_description"]["version"]) - for x in created_services + for x in expected ] json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) @@ -60,8 +75,8 @@ def _check_services(created_services, services, schema_version="v1"): with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) - for service in services: - service.pop("image_digest") + for service in got: + service.pop("image_digest", None) if schema_version == "v1": assert ( created_service_descriptions.count((service["key"], service["version"])) @@ -70,78 +85,121 @@ def _check_services(created_services, services, schema_version="v1"): json_schema_validator.validate_instance_object(service, service_schema) -async def test_services_get(docker_registry, client, push_services, api_version_prefix): +async def test_list_services_with_empty_registry( + docker_registry: str, + client: httpx.AsyncClient, + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ # empty case - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services([], services) - - # some services - created_services = await push_services( + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=[], got=services) + + +@pytest.fixture +async def created_services( + push_services: PushServicesCallable, +) -> list[ServiceInRegistryInfoDict]: + return await push_services( number_of_computational_services=3, number_of_interactive_services=2 ) - web_response = await client.get(f"/{api_version_prefix}/services") - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - _check_services(created_services, services) - - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=blahblah" - ) - assert web_response.status == 400 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert "data" not in services_enveloped - assert "error" in services_enveloped - web_response = await client.get( + +async def test_list_services( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ + resp = await client.get(f"/{api_version_prefix}/services") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert isinstance(services, list) + + _assert_services(expected=created_services, got=services) + + +async def test_get_service_bad_request( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" + assert len(created_services) > 0 + + resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not services + assert error + + +async def test_list_services_by_service_type( + docker_registry: str, + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert docker_registry, "docker-registry is not ready?" 
+ assert len(created_services) == 5 + + resp = await client.get( f"/{api_version_prefix}/services?service_type=computational" ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services assert len(services) == 3 - web_response = await client.get( - f"/{api_version_prefix}/services?service_type=interactive" - ) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] + resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") + assert resp.status_code == status.HTTP_200_OK + + services, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert services assert len(services) == 2 -async def test_services_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( - f"/{api_version_prefix}/services/whatever/someversion" - ) - assert web_response.status == 400 - web_response = await client.get( +async def test_get_services_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") + assert resp.status_code == status.HTTP_400_BAD_REQUEST + + resp = await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" ) - assert web_response.status == 404 - web_response = await client.get( + assert resp.status_code == status.HTTP_404_NOT_FOUND + + resp = await client.get( 
f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" ) - assert web_response.status == 404 + assert resp.status_code == status.HTTP_404_NOT_FOUND + - created_services = await push_services(3, 2) +async def test_get_services_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): assert len(created_services) == 5 - retrieved_services = [] + retrieved_services: list[ServiceInRegistryInfoDict] = [] for created_service in created_services: service_description = created_service["service_description"] # note that it is very important to remove the safe="/" from quote!!!! @@ -149,25 +207,26 @@ async def test_services_by_key_version_get( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/services/{key}/{version}" - web_response = await client.get(url) + resp = await client.get(url) - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. 
- assert web_response.content_type == "application/json" - services_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" + assert resp.encoding == "application/json" + services_enveloped = resp.json() assert isinstance(services_enveloped["data"], list) services = services_enveloped["data"] assert len(services) == 1 retrieved_services.append(services[0]) - _check_services(created_services, retrieved_services) + + _assert_services(expected=created_services, got=retrieved_services) async def test_get_service_labels( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - created_services = await push_services(3, 2) + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 for service in created_services: service_description = service["service_description"] @@ -176,32 +235,37 @@ async def test_get_service_labels( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/services/{key}/{version}/labels" - web_response = await client.get(url) - assert web_response.status == 200, await web_response.text() + resp = await client.get(url) + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services_enveloped = await web_response.json() + services_enveloped = resp.json() labels = services_enveloped["data"] assert service["docker_labels"] == labels -async def test_services_extras_by_key_version_get( - client, push_services, api_version_prefix -): # pylint: disable=W0613, W0621 - web_response = await client.get( +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( f"/{api_version_prefix}/service_extras/whatever/someversion" ) - assert web_response.status == 400 - web_response = await client.get( + assert resp.status_code == 
status.HTTP_400_BAD_REQUEST + resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) - assert web_response.status == 404 - web_response = await client.get( + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" ) - assert web_response.status == 404 + assert resp.status_code == status.HTTP_404_NOT_FOUND + - created_services = await push_services(3, 2) +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + created_services: list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): assert len(created_services) == 5 for created_service in created_services: @@ -211,13 +275,11 @@ async def test_services_extras_by_key_version_get( quote(service_description[key], safe="") for key in ("key", "version") ) url = f"/{api_version_prefix}/service_extras/{key}/{version}" - web_response = await client.get(url) + resp = await client.get(url) - assert ( - web_response.status == 200 - ), await web_response.text() # here the error is actually json. 
- assert web_response.content_type == "application/json" - service_extras_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + assert resp.encoding == "application/json" + service_extras_enveloped = resp.json() assert isinstance(service_extras_enveloped["data"], dict) service_extras = service_extras_enveloped["data"] @@ -225,7 +287,7 @@ async def test_services_extras_by_key_version_get( async def _start_get_stop_services( - client, + client: httpx.AsyncClient, push_services, user_id, project_id, @@ -235,10 +297,10 @@ async def _start_get_stop_services( mocker, ): params = {} - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - assert web_response.status == 400 + assert resp.status_code == status.HTTP_400_BAD_REQUEST params = { "user_id": "None", @@ -248,19 +310,19 @@ async def _start_get_stop_services( "service_tag": "None", # optional "service_basepath": "None", # optional } - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 400, data + data = resp.json() + assert resp.status_code == status.HTTP_400_BAD_REQUEST, data params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" params["service_tag"] = "1.2.3" - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 404, data + data = resp.json() + assert resp.status_code == status.HTTP_404_NOT_FOUND, data created_services = await push_services(0, 2) assert len(created_services) == 2 @@ -275,12 +337,12 @@ async def _start_get_stop_services( params["service_basepath"] = "/i/am/a/basepath" params["service_uuid"] = str(uuid.uuid4()) # start the service - web_response = await 
client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - assert web_response.status == 201 - assert web_response.content_type == "application/json" - running_service_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_201_CREATED + assert resp.encoding == "application/json" + running_service_enveloped = resp.json() assert isinstance(running_service_enveloped["data"], dict) assert all( k in running_service_enveloped["data"] @@ -313,14 +375,14 @@ async def _start_get_stop_services( assert service_basepath == params["service_basepath"] # get the service - web_response = await client.request( + resp = await client.request( "GET", f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", ) - assert web_response.status == 200 - text = await web_response.text() - assert web_response.content_type == "application/json", text - running_service_enveloped = await web_response.json() + assert resp.status_code == status.HTTP_200_OK + text = resp.text + assert resp.encoding == "application/json", f"Got {text=}" + running_service_enveloped = resp.json() assert isinstance(running_service_enveloped["data"], dict) assert all( k in running_service_enveloped["data"] @@ -371,17 +433,17 @@ async def _start_get_stop_services( status=200, callback=mocked_save_state_cb, ) - web_response = await client.delete( + resp = await client.delete( f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", params=query_params, ) if expected_save_state_call: mocked_save_state_cb.assert_called_once() - text = await web_response.text() - assert web_response.status == 204, text - assert web_response.content_type == "application/json" - data = await web_response.json() + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.encoding == "application/json" + data = resp.json() assert data is None @@ -390,7 +452,7 @@ async def 
_start_get_stop_services( ) async def test_running_services_post_and_delete_no_swarm( configure_swarm_stack_name, - client, + client: httpx.AsyncClient, push_services, user_id, project_id, @@ -402,11 +464,11 @@ async def test_running_services_post_and_delete_no_swarm( "service_uuid": "sdlfkj4", "service_key": "simcore/services/comp/some-key", } - web_response = await client.post( + resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - data = await web_response.json() - assert web_response.status == 500, data + data = resp.json() + assert resp.status_code == 500, data @pytest.mark.parametrize( @@ -414,7 +476,7 @@ async def test_running_services_post_and_delete_no_swarm( ) async def test_running_services_post_and_delete( configure_swarm_stack_name, - client, + client: httpx.AsyncClient, push_services, docker_swarm, user_id, @@ -437,7 +499,7 @@ async def test_running_services_post_and_delete( async def test_running_interactive_services_list_get( - client, push_services, docker_swarm + client: httpx.AsyncClient, push_services, docker_swarm ): """Test case for running_interactive_services_list_get @@ -461,10 +523,10 @@ async def test_running_interactive_services_list_get( params["service_tag"] = service_description["version"] params["service_uuid"] = str(uuid.uuid4()) # start the service - web_response = await client.post( + resp = await client.post( "/v0/running_interactive_services", params=params ) - assert web_response.status == 201 + assert resp.status_code == 201 # get the list of services for user_id in user_ids: for project_id in project_ids: @@ -472,12 +534,12 @@ async def test_running_interactive_services_list_get( # list by user_id params["user_id"] = user_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = 
unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -485,12 +547,12 @@ async def test_running_interactive_services_list_get( # list by user_id and project_id params["project_id"] = project_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -499,12 +561,12 @@ async def test_running_interactive_services_list_get( params = {} params["project_id"] = project_id response = await client.get( - path="/v0/running_interactive_services", params=params + "/v0/running_interactive_services", params=params ) - assert response.status == 200, "Response body is : " + ( - await response.read() - ).decode("utf-8") - data, error = unwrap_envelope(await response.json()) + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response.json()) assert data assert not error services_list = data @@ -515,7 +577,6 @@ async def test_running_interactive_services_list_get( async def test_performance_get_services( loop, configure_custom_registry, configure_schemas_location ): - import time fake_request = "fake request" start_time = time.perf_counter() @@ -524,15 +585,16 @@ async def test_performance_get_services( for i in range(number_of_calls): print("calling iteration", i) start_time_i = time.perf_counter() - web_response = await 
rest.handlers.services_get(fake_request) - assert web_response.status == 200 - assert web_response.content_type == "application/json" - services_enveloped = json.loads(web_response.text) + resp = await rest.handlers.services_get(fake_request) + assert resp.status_code == status.HTTP_200_OK + assert resp.encoding == "application/json" + services_enveloped = json.loads(resp.text) assert isinstance(services_enveloped["data"], list) services = services_enveloped["data"] number_of_services = len(services) print("iteration completed in", (time.perf_counter() - start_time_i), "s") stop_time = time.perf_counter() + print( f"Time to run {number_of_calls} times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" ) From e9c6fd44a5177601374dba807172e963acfbe908 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:13:04 +0100 Subject: [PATCH 073/201] rm test_handlers -> api --- services/director/tests/test_handlers.py | 600 ----------------------- 1 file changed, 600 deletions(-) delete mode 100644 services/director/tests/test_handlers.py diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py deleted file mode 100644 index 3bac70c63f7..00000000000 --- a/services/director/tests/test_handlers.py +++ /dev/null @@ -1,600 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - -import json -import time -import uuid -from collections.abc import AsyncIterator -from urllib.parse import quote - -import httpx -import pytest -from aioresponses.core import CallbackResult, aioresponses -from fastapi import FastAPI, status -from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict -from helpers import json_schema_validator -from httpx._transports.asgi import 
ASGITransport -from simcore_service_director import resources, rest - - -@pytest.fixture -async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: - # - Needed for app to trigger start/stop event handlers - # - Prefer this client instead of fastapi.testclient.TestClient - async with httpx.AsyncClient( - app=app, - base_url="http://director.testserver.io", - headers={"Content-Type": "application/json"}, - ) as client: - assert isinstance(client._transport, ASGITransport) - yield client - - -def _assert_response_and_unwrap_envelope(got: httpx.Response): - assert got.encoding == "application/json" - - body = got.json() - assert isinstance(body, dict) - assert "data" in body or "error" in body - return body.get("data"), body.get("error") - - -async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): - resp = await client.get(f"/{api_version_prefix}/") - - assert resp.is_success - assert resp.status_code == status.HTTP_200_OK - - data, error = _assert_response_and_unwrap_envelope(resp) - assert data - assert not error - - assert data["name"] == "simcore-service-director" - assert data["status"] == "SERVICE_RUNNING" - assert data["version"] == "0.1.0" - assert data["api_version"] == "0.1.0" - - -def _assert_services( - *, - expected: list[ServiceInRegistryInfoDict], - got: list[ServiceInRegistryInfoDict], - schema_version="v1", -): - assert len(expected) == len(got) - - created_service_descriptions = [ - (x["service_description"]["key"], x["service_description"]["version"]) - for x in expected - ] - - json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() is True - with json_schema_path.open() as file_pt: - service_schema = json.load(file_pt) - - for service in got: - service.pop("image_digest", None) - if schema_version == "v1": - assert ( - created_service_descriptions.count((service["key"], service["version"])) - == 1 - ) - json_schema_validator.validate_instance_object(service, 
service_schema) - - -async def test_list_services_with_empty_registry( - docker_registry: str, - client: httpx.AsyncClient, - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - - # empty case - resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert isinstance(services, list) - - _assert_services(expected=[], got=services) - - -@pytest.fixture -async def created_services( - push_services: PushServicesCallable, -) -> list[ServiceInRegistryInfoDict]: - return await push_services( - number_of_computational_services=3, number_of_interactive_services=2 - ) - - -async def test_list_services( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - - resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert isinstance(services, list) - - _assert_services(expected=created_services, got=services) - - -async def test_get_service_bad_request( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" 
- assert len(created_services) > 0 - - resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not services - assert error - - -async def test_list_services_by_service_type( - docker_registry: str, - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert docker_registry, "docker-registry is not ready?" - assert len(created_services) == 5 - - resp = await client.get( - f"/{api_version_prefix}/services?service_type=computational" - ) - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert services - assert len(services) == 3 - - resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") - assert resp.status_code == status.HTTP_200_OK - - services, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert services - assert len(services) == 2 - - -async def test_get_services_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - resp = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - resp = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - retrieved_services: list[ServiceInRegistryInfoDict] = [] - for 
created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! - key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/services/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - assert resp.encoding == "application/json" - services_enveloped = resp.json() - - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - assert len(services) == 1 - retrieved_services.append(services[0]) - - _assert_services(expected=created_services, got=retrieved_services) - - -async def test_get_service_labels( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for service in created_services: - service_description = service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/services/{key}/{version}/labels" - resp = await client.get(url) - assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - - services_enveloped = resp.json() - labels = services_enveloped["data"] - - assert service["docker_labels"] == labels - - -async def test_get_services_extras_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get( - f"/{api_version_prefix}/service_extras/whatever/someversion" - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_extras_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/service_extras/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" - assert resp.encoding == "application/json" - service_extras_enveloped = resp.json() - - assert isinstance(service_extras_enveloped["data"], dict) - service_extras = service_extras_enveloped["data"] - assert created_service["service_extras"] == service_extras - - -async def _start_get_stop_services( - client: httpx.AsyncClient, - push_services, - user_id, - project_id, - api_version_prefix: str, - save_state: bool | None, - expected_save_state_call: bool, - mocker, -): - params = {} - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "None", - "service_tag": "None", # optional - "service_basepath": "None", # optional - } - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == status.HTTP_400_BAD_REQUEST, data - - params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" - params["service_tag"] = "1.2.3" - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == status.HTTP_404_NOT_FOUND, data - - created_services = await push_services(0, 2) - assert len(created_services) == 2 - for created_service in created_services: - service_description = created_service["service_description"] - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - service_port = created_service["internal_port"] 
- service_entry_point = created_service["entry_point"] - params["service_basepath"] = "/i/am/a/basepath" - params["service_uuid"] = str(uuid.uuid4()) - # start the service - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - assert resp.status_code == status.HTTP_201_CREATED - assert resp.encoding == "application/json" - running_service_enveloped = resp.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - "service_version", - "published_port", - "entry_point", - "service_host", - "service_port", - "service_basepath", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert running_service_enveloped["data"]["service_port"] == service_port - service_published_port = running_service_enveloped["data"]["published_port"] - assert not service_published_port - assert service_entry_point == running_service_enveloped["data"]["entry_point"] - service_host = running_service_enveloped["data"]["service_host"] - assert service_host == f"test_{params['service_uuid']}" - service_basepath = running_service_enveloped["data"]["service_basepath"] - assert service_basepath == params["service_basepath"] - - # get the service - resp = await client.request( - "GET", - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - ) - assert resp.status_code == status.HTTP_200_OK - text = resp.text - assert resp.encoding == "application/json", f"Got {text=}" - running_service_enveloped = resp.json() - assert isinstance(running_service_enveloped["data"], dict) - assert all( - k in running_service_enveloped["data"] - for k in [ - "service_uuid", - "service_key", - 
"service_version", - "published_port", - "entry_point", - ] - ) - assert ( - running_service_enveloped["data"]["service_uuid"] == params["service_uuid"] - ) - assert running_service_enveloped["data"]["service_key"] == params["service_key"] - assert ( - running_service_enveloped["data"]["service_version"] - == params["service_tag"] - ) - assert ( - running_service_enveloped["data"]["published_port"] - == service_published_port - ) - assert running_service_enveloped["data"]["entry_point"] == service_entry_point - assert running_service_enveloped["data"]["service_host"] == service_host - assert running_service_enveloped["data"]["service_port"] == service_port - assert running_service_enveloped["data"]["service_basepath"] == service_basepath - - # stop the service - query_params = {} - if save_state: - query_params.update({"save_state": "true" if save_state else "false"}) - - mocked_save_state_cb = mocker.MagicMock( - return_value=CallbackResult(status=200, payload={}) - ) - PASSTHROUGH_REQUESTS_PREFIXES = [ - "http://127.0.0.1", - "http://localhost", - "unix://", # docker engine - "ws://", # websockets - ] - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - - # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- - mock.post( - f"http://{service_host}:{service_port}{service_basepath}/state", - status=200, - callback=mocked_save_state_cb, - ) - resp = await client.delete( - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - params=query_params, - ) - if expected_save_state_call: - mocked_save_state_cb.assert_called_once() - - text = resp.text - assert resp.status_code == status.HTTP_204_NO_CONTENT, text - assert resp.encoding == "application/json" - data = resp.json() - assert data is None - - -@pytest.mark.skip( - reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" -) -async def 
test_running_services_post_and_delete_no_swarm( - configure_swarm_stack_name, - client: httpx.AsyncClient, - push_services, - user_id, - project_id, - api_version_prefix, -): - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "simcore/services/comp/some-key", - } - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == 500, data - - -@pytest.mark.parametrize( - "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] -) -async def test_running_services_post_and_delete( - configure_swarm_stack_name, - client: httpx.AsyncClient, - push_services, - docker_swarm, - user_id, - project_id, - api_version_prefix, - save_state: bool | None, - expected_save_state_call: bool, - mocker, -): - await _start_get_stop_services( - client, - push_services, - user_id, - project_id, - api_version_prefix, - save_state, - expected_save_state_call, - mocker, - ) - - -async def test_running_interactive_services_list_get( - client: httpx.AsyncClient, push_services, docker_swarm -): - """Test case for running_interactive_services_list_get - - Returns a list of interactive services - """ - user_ids = ["first_user_id", "second_user_id"] - project_ids = ["first_project_id", "second_project_id", "third_project_id"] - # prepare services - NUM_SERVICES = 1 - created_services = await push_services(0, NUM_SERVICES) - assert len(created_services) == NUM_SERVICES - # start the services - for user_id in user_ids: - for project_id in project_ids: - for created_service in created_services: - service_description = created_service["service_description"] - params = {} - params["user_id"] = user_id - params["project_id"] = project_id - params["service_key"] = service_description["key"] - params["service_tag"] = service_description["version"] - params["service_uuid"] = str(uuid.uuid4()) - # start the service - resp = await 
client.post( - "/v0/running_interactive_services", params=params - ) - assert resp.status_code == 201 - # get the list of services - for user_id in user_ids: - for project_id in project_ids: - params = {} - # list by user_id - params["user_id"] = user_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(project_ids) * NUM_SERVICES - # list by user_id and project_id - params["project_id"] = project_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == NUM_SERVICES - # list by project_id - params = {} - params["project_id"] = project_id - response = await client.get( - "/v0/running_interactive_services", params=params - ) - assert ( - response.status_code == status.HTTP_200_OK - ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) - assert data - assert not error - services_list = data - assert len(services_list) == len(user_ids) * NUM_SERVICES - - -@pytest.mark.skip(reason="test needs credentials to real registry") -async def test_performance_get_services( - loop, configure_custom_registry, configure_schemas_location -): - - fake_request = "fake request" - start_time = time.perf_counter() - number_of_calls = 1 - number_of_services = 0 - for i in range(number_of_calls): - print("calling iteration", i) - start_time_i = time.perf_counter() - resp = await rest.handlers.services_get(fake_request) - assert resp.status_code == 
status.HTTP_200_OK - assert resp.encoding == "application/json" - services_enveloped = json.loads(resp.text) - assert isinstance(services_enveloped["data"], list) - services = services_enveloped["data"] - number_of_services = len(services) - print("iteration completed in", (time.perf_counter() - start_time_i), "s") - stop_time = time.perf_counter() - - print( - f"Time to run {number_of_calls} times: {stop_time - start_time}s, #services {number_of_services}, time per call {(stop_time - start_time) / number_of_calls / number_of_services}s/service" - ) From 290d51eeada07110a99a21a76b37d351ee9e0665 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:14:30 +0100 Subject: [PATCH 074/201] migrated list services --- .../api/rest/_services.py | 48 +++++++++++++++++-- 1 file changed, 44 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 2bf1b066bf3..57601c83f0b 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -1,15 +1,55 @@ +import logging +from typing import Annotated, Any + import arrow -from fastapi import APIRouter +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope from models_library.services_enums import ServiceType from models_library.services_types import ServiceKey, ServiceVersion +from servicelib.fastapi.dependencies import get_app + +from ... import exceptions, registry_proxy router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/services") -async def list_services(service_type: ServiceType | None = None): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def list_services( + the_app: Annotated[FastAPI, Depends(get_app)], + service_type: ServiceType | None = None, +): + log.debug( + "Client does list_services request with service_type %s", + service_type, + ) + try: + services: list[dict[str, Any]] = [] + if not service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.ALL + ) + elif "computational" in service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.COMPUTATIONAL + ) + elif "interactive" in service_type: + services = await registry_proxy.list_services( + the_app, registry_proxy.ServiceType.DYNAMIC + ) + # NOTE: the validation is done in the catalog. This entrypoint IS and MUST BE only used by the catalog!! + # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] + # services = node_validator.validate_nodes(services) + return Envelope[list[dict[str, Any]]](data=services) + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/services/{service_key}/{service_version}") From 6a3d01956ec06425de352e5cf5a4d67799e4efb5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:18:49 +0100 Subject: [PATCH 075/201] migrated services endpoint --- .../api/rest/_services.py | 62 ++++++++++++++++--- 1 file changed, 54 insertions(+), 8 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 57601c83f0b..7e3bdb91d51 100644 --- 
a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -1,7 +1,6 @@ import logging from typing import Annotated, Any -import arrow from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.generics import Envelope from models_library.services_enums import ServiceType @@ -19,7 +18,7 @@ async def list_services( the_app: Annotated[FastAPI, Depends(get_app)], service_type: ServiceType | None = None, -): +) -> Envelope[list[dict[str, Any]]]: log.debug( "Client does list_services request with service_type %s", service_type, @@ -54,17 +53,64 @@ async def list_services( @router.get("/services/{service_key}/{service_version}") async def get_service( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +) -> Envelope[list[dict[str, Any]]]: + log.debug( + "Client does get_service with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + services = [ + await registry_proxy.get_image_details( + the_app, service_key, service_version + ) + ] + return Envelope[list[dict[str, Any]]](data=services) + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/services/{service_key}/{service_version}/labels") async def list_service_labels( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -): - # NOTE: sync url in 
docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +) -> Envelope[dict[str, Any]]: + log.debug( + "Retrieving service labels with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_labels, _ = await registry_proxy.get_image_labels( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_labels) + + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From 8a6dbec2f3fe4ee88f90a1e68ac21121f29188fd Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:25:17 +0100 Subject: [PATCH 076/201] cli and models --- .../models/__init__.py | 0 services/director/tests/conftest.py | 3 +- .../director/tests/test__model_examples.py | 28 +++++++++++++++ services/director/tests/test_cli.py | 34 +++++++++++++++++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 services/director/src/simcore_service_director/models/__init__.py create mode 100644 services/director/tests/test__model_examples.py create mode 100644 services/director/tests/test_cli.py diff --git a/services/director/src/simcore_service_director/models/__init__.py b/services/director/src/simcore_service_director/models/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 925c6c26d03..d09e4623d40 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -4,8 +4,8 @@ # pylint: disable=too-many-arguments 
import os +from collections.abc import AsyncIterator from pathlib import Path -from typing import AsyncIterator import pytest import simcore_service_director @@ -19,6 +19,7 @@ pytest_plugins = [ "fixtures.fake_services", + "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", diff --git a/services/director/tests/test__model_examples.py b/services/director/tests/test__model_examples.py new file mode 100644 index 00000000000..d9604d738d6 --- /dev/null +++ b/services/director/tests/test__model_examples.py @@ -0,0 +1,28 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import json +from typing import Any + +import pytest +import simcore_service_director.models +from pydantic import BaseModel, ValidationError +from pytest_simcore.pydantic_models import walk_model_examples_in_package + + +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + walk_model_examples_in_package(simcore_service_director.models), +) +def test_director_service_model_examples( + model_cls: type[BaseModel], example_name: int, example_data: Any +): + try: + assert model_cls.parse_obj(example_data) is not None + except ValidationError as err: + pytest.fail( + f"\n{example_name}: {json.dumps(example_data, indent=1)}\nError: {err}" + ) diff --git a/services/director/tests/test_cli.py b/services/director/tests/test_cli.py new file mode 100644 index 00000000000..3b42989bcff --- /dev/null +++ b/services/director/tests/test_cli.py @@ -0,0 +1,34 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +import os + +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_director._meta import API_VERSION +from simcore_service_director.cli import main +from 
simcore_service_director.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help_and_version(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + result = cli_runner.invoke(main, "--version") + assert result.exit_code == os.EX_OK, result.output + assert result.stdout.strip() == API_VERSION + + +def test_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK + + settings = ApplicationSettings.parse_raw(result.output) + assert settings.dict() == ApplicationSettings.create_from_envs().dict() + + +def test_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, ["run"]) + assert result.exit_code == 0 + assert "disabled" in result.stdout From 8979952bd2f1b77e24db58a4e263aa849d53a2f0 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:29:11 +0100 Subject: [PATCH 077/201] test healthcheck --- services/director/tests/api/conftest.py | 2 +- services/director/tests/api/test_rest_health.py | 15 +++++++++++++++ .../director/tests/api/test_rest_services.py | 16 ---------------- 3 files changed, 16 insertions(+), 17 deletions(-) create mode 100644 services/director/tests/api/test_rest_health.py diff --git a/services/director/tests/api/conftest.py b/services/director/tests/api/conftest.py index 8cc186e8465..c1d010bb3a2 100644 --- a/services/director/tests/api/conftest.py +++ b/services/director/tests/api/conftest.py @@ -21,7 +21,7 @@ async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: base_url="http://director.testserver.io", headers={"Content-Type": "application/json"}, ) as client: - assert isinstance(client._transport, ASGITransport) + assert isinstance(getattr(client, "_transport", None), ASGITransport) yield client diff --git 
a/services/director/tests/api/test_rest_health.py b/services/director/tests/api/test_rest_health.py new file mode 100644 index 00000000000..b1e6db622a4 --- /dev/null +++ b/services/director/tests/api/test_rest_health.py @@ -0,0 +1,15 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +import httpx +from fastapi import status + + +async def test_healthcheck(client: httpx.AsyncClient, api_version_prefix: str): + resp = await client.get(f"/{api_version_prefix}/") + + assert resp.is_success + assert resp.status_code == status.HTTP_200_OK + assert "simcore_service_director" in resp.text diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 6d5941a6044..966d64c7974 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -22,22 +22,6 @@ def _assert_response_and_unwrap_envelope(got: httpx.Response): return body.get("data"), body.get("error") -async def test_get_root_path(client: httpx.AsyncClient, api_version_prefix: str): - resp = await client.get(f"/{api_version_prefix}/") - - assert resp.is_success - assert resp.status_code == status.HTTP_200_OK - - data, error = _assert_response_and_unwrap_envelope(resp) - assert data - assert not error - - assert data["name"] == "simcore-service-director" - assert data["status"] == "SERVICE_RUNNING" - assert data["version"] == "0.1.0" - assert data["api_version"] == "0.1.0" - - def _assert_services( *, expected: list[ServiceInRegistryInfoDict], From 1454f102c842809c2bcd615cd6a4646fb371e5c6 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:35:23 +0100 Subject: [PATCH 078/201] split service extras --- .../director/tests/api/test_rest_services.py | 41 +------------ .../tests/api/test_rest_services_extras.py | 59 +++++++++++++++++++ 2 
files changed, 60 insertions(+), 40 deletions(-) create mode 100644 services/director/tests/api/test_rest_services_extras.py diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 966d64c7974..3c511e0ef61 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -35,6 +35,7 @@ def _assert_services( for s in expected ] + # TODO: check these are correct! json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) assert json_schema_path.exists() is True with json_schema_path.open() as file_pt: @@ -198,43 +199,3 @@ async def test_get_service_labels( assert not error assert service["docker_labels"] == labels - - -async def test_get_services_extras_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str -): - resp = await client.get( - f"/{api_version_prefix}/service_extras/whatever/someversion" - ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - resp = await client.get( - f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" - ) - assert resp.status_code == status.HTTP_404_NOT_FOUND - - -async def test_get_services_extras_by_key_and_version( - client: httpx.AsyncClient, - created_services: list[ServiceInRegistryInfoDict], - api_version_prefix: str, -): - assert len(created_services) == 5 - - for created_service in created_services: - service_description = created_service["service_description"] - # note that it is very important to remove the safe="/" from quote!!!! 
- key, version = ( - quote(service_description[key], safe="") for key in ("key", "version") - ) - url = f"/{api_version_prefix}/service_extras/{key}/{version}" - resp = await client.get(url) - - assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" - - service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) - assert not error - assert created_service["service_extras"] == service_extras diff --git a/services/director/tests/api/test_rest_services_extras.py b/services/director/tests/api/test_rest_services_extras.py new file mode 100644 index 00000000000..e87a7e4cf4c --- /dev/null +++ b/services/director/tests/api/test_rest_services_extras.py @@ -0,0 +1,59 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + +from urllib.parse import quote + +import httpx +from fastapi import status +from fixtures.fake_services import ServiceInRegistryInfoDict + + +def _assert_response_and_unwrap_envelope(got: httpx.Response): + assert got.encoding == "application/json" + + body = got.json() + assert isinstance(body, dict) + assert "data" in body or "error" in body + return body.get("data"), body.get("error") + + +async def test_get_services_extras_by_key_and_version_with_empty_registry( + client: httpx.AsyncClient, api_version_prefix: str +): + resp = await client.get( + f"/{api_version_prefix}/service_extras/whatever/someversion" + ) + assert resp.status_code == status.HTTP_400_BAD_REQUEST + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + resp = await client.get( + f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" + ) + assert resp.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_services_extras_by_key_and_version( + client: httpx.AsyncClient, + created_services: 
list[ServiceInRegistryInfoDict], + api_version_prefix: str, +): + assert len(created_services) == 5 + + for created_service in created_services: + service_description = created_service["service_description"] + # note that it is very important to remove the safe="/" from quote!!!! + key, version = ( + quote(service_description[key], safe="") for key in ("key", "version") + ) + url = f"/{api_version_prefix}/service_extras/{key}/{version}" + resp = await client.get(url) + + assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" + + service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) + assert not error + assert created_service["service_extras"] == service_extras From 6ae3476921185b05fee8b50a72366be9ea5d42d4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:35:35 +0100 Subject: [PATCH 079/201] split service extras --- .../{test_rest_services_extras.py => test_rest_service_extras.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename services/director/tests/api/{test_rest_services_extras.py => test_rest_service_extras.py} (100%) diff --git a/services/director/tests/api/test_rest_services_extras.py b/services/director/tests/api/test_rest_service_extras.py similarity index 100% rename from services/director/tests/api/test_rest_services_extras.py rename to services/director/tests/api/test_rest_service_extras.py From 34d86f52d9804725bb993c056457a556b2478ed3 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 17:54:13 +0100 Subject: [PATCH 080/201] migrated service extras endpoint --- .../api/rest/_service_extras.py | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index 1c30f411e22..f301a74a429 100644 --- 
a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -1,14 +1,43 @@ -import arrow -from fastapi import APIRouter +import logging +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.generics import Envelope from models_library.services_types import ServiceKey, ServiceVersion +from servicelib.fastapi.dependencies import get_app + +from ... import exceptions, registry_proxy router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/service_extras/{service_key}/{service_version}") async def list_service_extras( + the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, ): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! - return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + log.debug( + "Client does service_extras_by_key_version_get request with service_key %s, service_version %s", + service_key, + service_version, + ) + try: + service_extras = await registry_proxy.get_service_extras( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_extras) + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From d3cb99364008e09137615e2621524035890919b2 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:06:11 +0100 Subject: [PATCH 081/201] import fix --- .../director/src/simcore_service_director/api/rest/routes.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py index 82ccec19302..d00722a5b6e 100644 --- a/services/director/src/simcore_service_director/api/rest/routes.py +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -4,7 +4,7 @@ http_exception_as_json_response, ) -from .._meta import API_VTAG +from ..._meta import API_VTAG from . import _health, _running_interactive_services, _service_extras, _services From d3299578b5a66f38a6bf8141740b3cf687057f78 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:01:21 +0100 Subject: [PATCH 082/201] adds api model --- .../api_schemas_director/__init__.py | 0 .../api_schemas_director/services.py | 5 +++ .../director/tests/api/test_rest_services.py | 40 +++++++------------ 3 files changed, 20 insertions(+), 25 deletions(-) create mode 100644 packages/models-library/src/models_library/api_schemas_director/__init__.py create mode 100644 packages/models-library/src/models_library/api_schemas_director/services.py diff --git a/packages/models-library/src/models_library/api_schemas_director/__init__.py b/packages/models-library/src/models_library/api_schemas_director/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/models-library/src/models_library/api_schemas_director/services.py b/packages/models-library/src/models_library/api_schemas_director/services.py new file mode 100644 index 00000000000..52578fd7a69 --- /dev/null +++ b/packages/models-library/src/models_library/api_schemas_director/services.py @@ -0,0 +1,5 @@ +from ..services_metadata_published import ServiceMetaDataPublished + + +class ServiceDataGet(ServiceMetaDataPublished): + ... 
diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/api/test_rest_services.py index 3c511e0ef61..b2db82e7902 100644 --- a/services/director/tests/api/test_rest_services.py +++ b/services/director/tests/api/test_rest_services.py @@ -3,14 +3,12 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -import json from urllib.parse import quote import httpx from fastapi import status from fixtures.fake_services import ServiceInRegistryInfoDict -from helpers import json_schema_validator -from simcore_service_director import resources +from models_library.api_schemas_director.services import ServiceDataGet def _assert_response_and_unwrap_envelope(got: httpx.Response): @@ -35,20 +33,12 @@ def _assert_services( for s in expected ] - # TODO: check these are correct! - json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) - assert json_schema_path.exists() is True - with json_schema_path.open() as file_pt: - service_schema = json.load(file_pt) - - for service in got: - service.pop("image_digest", None) - if schema_version == "v1": - assert ( - expected_key_version_tuples.count((service["key"], service["version"])) - == 1 - ) - json_schema_validator.validate_instance_object(service, service_schema) + for data in got: + service = ServiceDataGet.parse_obj(data) + assert ( + expected_key_version_tuples.count((f"{service.key}", f"{service.version}")) + == 1 + ) async def test_list_services_with_empty_registry( @@ -60,7 +50,7 @@ async def test_list_services_with_empty_registry( # empty case resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -78,7 +68,7 @@ async def test_list_services( assert docker_registry, "docker-registry is not ready?" 
resp = await client.get(f"/{api_version_prefix}/services") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -97,7 +87,7 @@ async def test_get_service_bad_request( assert len(created_services) > 0 resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not services @@ -116,7 +106,7 @@ async def test_list_services_by_service_type( resp = await client.get( f"/{api_version_prefix}/services?service_type=computational" ) - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -124,7 +114,7 @@ async def test_list_services_by_service_type( assert len(services) == 3 resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") - assert resp.status_code == status.HTTP_200_OK + assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp.json()) assert not error @@ -136,17 +126,17 @@ async def test_get_services_by_key_and_version_with_empty_registry( client: httpx.AsyncClient, api_version_prefix: str ): resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" resp = 
await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" async def test_get_services_by_key_and_version( From 2f73644af823c66481c38658436f2d326976f63b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:04:02 +0100 Subject: [PATCH 083/201] updates service-extras --- services/director/tests/api/test_rest_service_extras.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/tests/api/test_rest_service_extras.py b/services/director/tests/api/test_rest_service_extras.py index e87a7e4cf4c..16cd76e254f 100644 --- a/services/director/tests/api/test_rest_service_extras.py +++ b/services/director/tests/api/test_rest_service_extras.py @@ -25,15 +25,15 @@ async def test_get_services_extras_by_key_and_version_with_empty_registry( resp = await client.get( f"/{api_version_prefix}/service_extras/whatever/someversion" ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND + assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" async def test_get_services_extras_by_key_and_version( From 843cccb0cae397ebde8c10671c908d7a8566d8c5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:16:50 +0100 Subject: [PATCH 084/201] migrated running services --- 
.../api/rest/_running_interactive_services.py | 140 ++++++++++++++++-- 1 file changed, 124 insertions(+), 16 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index 1bd53330d30..29a58b80fba 100644 --- a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -1,41 +1,149 @@ +import logging from pathlib import Path +from typing import Annotated, Any from uuid import UUID -import arrow -from fastapi import APIRouter +from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException, status +from models_library.generics import Envelope from models_library.projects import ProjectID from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID +from servicelib.fastapi.dependencies import get_app +from simcore_service_director import exceptions + +from ... import producer router = APIRouter() +log = logging.getLogger(__name__) + @router.get("/running_interactive_services") -async def list_running_services(user_id: UserID, project_id: ProjectID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def list_running_services( + the_app: Annotated[FastAPI, Depends(get_app)], + user_id: UserID | None, + project_id: ProjectID | None, +): + log.debug( + "Client does list_running_services request user_id %s, project_id %s", + user_id, + project_id, + ) + try: + services = await producer.get_services_details( + the_app, + f"{user_id}" if user_id else None, + f"{project_id}" if project_id else None, + ) + return Envelope[list[dict[str, Any]]](data=services) + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err -@router.post("/running_interactive_services") +@router.post( + "/running_interactive_services", + status_code=status.HTTP_201_CREATED, +) async def start_service( + the_app: Annotated[FastAPI, Depends(get_app)], user_id: UserID, project_id: ProjectID, service_key: ServiceKey, service_uuid: UUID, service_basepath: Path, service_tag: ServiceVersion | None = None, -): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + x_simcore_user_agent: str = Header(...), +) -> Envelope[dict[str, Any]]: + log.debug( + "Client does start_service with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", + user_id, + project_id, + service_key, + service_tag, + service_uuid, + service_basepath, + x_simcore_user_agent, + ) + try: + service = await producer.start_service( + the_app, + f"{user_id}", + f"{project_id}", + service_key, + service_tag, + f"{service_uuid}", + f"{service_basepath}", + x_simcore_user_agent, + ) + return Envelope[dict[str, Any]](data=service) + except exceptions.ServiceStartTimeoutError as err: + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err + except exceptions.ServiceNotAvailableError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except exceptions.ServiceUUIDInUseError as err: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail=f"{err}" + ) from err + except exceptions.RegistryConnectionError as err: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err @router.get("/running_interactive_services/{service_uuid}") -async def get_running_service(service_uuid: UUID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" +async def get_running_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, +) -> Envelope[dict[str, Any]]: + log.debug( + "Client does get_running_service with service_uuid %s", + service_uuid, + ) + try: + service = await producer.get_service_details(the_app, f"{service_uuid}") + return Envelope[dict[str, Any]](data=service) + except exceptions.ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err + +@router.delete( + "/running_interactive_services/{service_uuid}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def stop_service( + the_app: Annotated[FastAPI, Depends(get_app)], + service_uuid: UUID, + save_state: bool = True, +): + log.debug( + "Client does stop_service with service_uuid %s", + service_uuid, + ) + try: + await producer.stop_service(the_app, f"{service_uuid}", save_state) -@router.delete("/running_interactive_services/{service_uuid}") -async def stop_service(service_uuid: UUID): - # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
- return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" + except exceptions.ServiceUUIDNotFoundError as err: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" + ) from err + except Exception as err: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" + ) from err From 1ac2876b2e2c5e193de68af007de91e235f36a7e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:17:10 +0100 Subject: [PATCH 085/201] migrated producer --- services/director/src/simcore_service_director/producer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 2414b3d13ee..5fe7477affa 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -749,7 +749,7 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str | None ) -> str: if service_key not in list_of_images: raise exceptions.ServiceNotAvailableError( @@ -772,6 +772,7 @@ async def _find_service_tag( ) log.debug("Service tag found is %s ", service_tag) + assert tag is not None # nosec return tag @@ -949,7 +950,7 @@ async def start_service( user_id: str, project_id: str, service_key: str, - service_tag: str, + service_tag: str | None, node_uuid: str, node_base_path: str, request_simcore_user_agent: str, From 3bc91343fa831e804fdc4dc279be415fc0aa0036 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:15:03 +0100 Subject: [PATCH 086/201] add get_application_settings --- .../src/simcore_service_director/core/settings.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git 
a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 4d288431c2c..259d102a5f4 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -1,6 +1,8 @@ import datetime import warnings +from typing import cast +from fastapi import FastAPI from models_library.basic_types import ( BootModeEnum, BuildTargetEnum, @@ -151,10 +153,6 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): env=["DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME", "SIMCORE_SERVICES_NETWORK_NAME"], ) # useful when developing with an alternative registry namespace - DIRECTOR_SIMCORE_SERVICES_PREFIX: str = Field( - default="simcore/services", - env=["DIRECTOR_SIMCORE_SERVICES_PREFIX", "SIMCORE_SERVICES_PREFIX"], - ) DIRECTOR_MONITORING_ENABLED: bool = Field( default=False, env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] @@ -175,3 +173,7 @@ def _validate_substitutions(cls, v): raise ValueError(msg) return v + + +def get_application_settings(app: FastAPI) -> ApplicationSettings: + return cast(ApplicationSettings, app.state.settings) From edb2b737b7f2cd214629c799f4f61abc63e5cf2a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:15:20 +0100 Subject: [PATCH 087/201] this is a constant --- services/director/src/simcore_service_director/constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py index 8ee614e5f8e..22fd12f945d 100644 --- a/services/director/src/simcore_service_director/constants.py +++ b/services/director/src/simcore_service_director/constants.py @@ -17,3 +17,5 @@ APP_REGISTRY_CACHE_DATA_KEY: Final[str] = __name__ + "_registry_cache_data" API_ROOT: Final[str] = "api" + +DIRECTOR_SIMCORE_SERVICES_PREFIX: Final[str] 
= "simcore/services" From 926f8928847d27074360ec1bb8be0fc3842d25aa Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:15:41 +0100 Subject: [PATCH 088/201] migrating --- .../registry_proxy.py | 75 +++++++++++++------ 1 file changed, 52 insertions(+), 23 deletions(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 78b9c0b80d9..977bc998ccf 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -1,4 +1,3 @@ -# pylint: disable=C0111 import asyncio import enum import json @@ -11,15 +10,20 @@ from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout from fastapi import FastAPI -from simcore_service_director import config, exceptions -from simcore_service_director.cache_request_decorator import cache_requests from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_result from tenacity.wait import wait_fixed from yarl import URL -from .config import APP_CLIENT_SESSION_KEY +from . 
import exceptions +from .cache_request_decorator import cache_requests +from .constants import ( + DIRECTOR_SIMCORE_SERVICES_PREFIX, + ORG_LABELS_TO_SCHEMA_LABELS, + SERVICE_RUNTIME_SETTINGS, +) +from .core.settings import ApplicationSettings, get_application_settings DEPENDENCIES_LABEL_KEY: str = "simcore.service.dependencies" @@ -42,20 +46,26 @@ class ServiceType(enum.Enum): async def _basic_auth_registry_request( app: FastAPI, path: str, method: str, **session_kwargs ) -> tuple[dict, dict]: - if not config.REGISTRY_URL: + app_settings = get_application_settings(app) + if not app_settings.DIRECTOR_REGISTRY.REGISTRY_URL: msg = "URL to registry is not defined" raise exceptions.DirectorException(msg) url = URL( - f"{'https' if config.REGISTRY_SSL else 'http'}://{config.REGISTRY_URL}{path}" + f"{'https' if app_settings.DIRECTOR_REGISTRY.REGISTRY_SSL else 'http'}://{app_settings.DIRECTOR_REGISTRY.REGISTRY_URL}{path}" ) logger.debug("Requesting registry using %s", url) # try the registry with basic authentication first, spare 1 call resp_data: dict = {} resp_headers: dict = {} auth = ( - BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) - if config.REGISTRY_AUTH and config.REGISTRY_USER and config.REGISTRY_PW + BasicAuth( + login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, + password=app_settings.DIRECTOR_REGISTRY.REGISTRY_PW.get_secret_value(), + ) + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + and app_settings.DIRECTOR_REGISTRY.REGISTRY_USER + and app_settings.DIRECTOR_REGISTRY.REGISTRY_PW else None ) @@ -68,7 +78,12 @@ async def _basic_auth_registry_request( logger.debug("Registry unauthorized request: %s", await response.text()) # basic mode failed, test with other auth mode resp_data, resp_headers = await _auth_registry_request( - url, method, response.headers, session, **session_kwargs + app_settings, + url, + method, + response.headers, + session, + **session_kwargs, ) elif response.status == HTTPStatus.NOT_FOUND: @@ -88,15 +103,24 @@ 
async def _basic_auth_registry_request( return (resp_data, resp_headers) except client_exceptions.ClientError as exc: - logger.exception("Unknown error while accessing registry: %s", str(exc)) - msg = f"Unknown error while accessing registry: {str(exc)}" + logger.exception("Unknown error while accessing registry") + msg = f"Unknown error while accessing registry: {exc!s}" raise exceptions.DirectorException(msg) from exc async def _auth_registry_request( - url: URL, method: str, auth_headers: dict, session: ClientSession, **kwargs + app_settings: ApplicationSettings, + url: URL, + method: str, + auth_headers: dict, + session: ClientSession, + **kwargs, ) -> tuple[dict, dict]: - if not config.REGISTRY_AUTH or not config.REGISTRY_USER or not config.REGISTRY_PW: + if ( + not app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + or not app_settings.DIRECTOR_REGISTRY.REGISTRY_USER + or not app_settings.DIRECTOR_REGISTRY.REGISTRY_PW + ): msg = "Wrong configuration: Authentication to registry is needed!" raise exceptions.RegistryConnectionError(msg) # auth issue let's try some authentication get the auth type @@ -113,7 +137,10 @@ async def _auth_registry_request( if not auth_type: msg = "Unknown registry type: cannot deduce authentication method!" 
raise exceptions.RegistryConnectionError(msg) - auth = BasicAuth(login=config.REGISTRY_USER, password=config.REGISTRY_PW) + auth = BasicAuth( + login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, + password=app_settings.DIRECTOR_REGISTRY.REGISTRY_PW.get_secret_value(), + ) # bearer type, it needs a token with all communications if auth_type == "Bearer": @@ -122,7 +149,7 @@ async def _auth_registry_request( service=auth_details["service"], scope=auth_details["scope"] ) async with session.get(token_url, auth=auth, **kwargs) as token_resp: - if not token_resp.status == HTTPStatus.OK: + if token_resp.status != HTTPStatus.OK: msg = f"Unknown error while authentifying with registry: {token_resp!s}" raise exceptions.RegistryConnectionError(msg) bearer_code = (await token_resp.json())["token"] @@ -211,6 +238,7 @@ async def on_startup() -> None: await _setup_registry(app) async def on_shutdown() -> None: + # nothing to do here ... app.add_event_handler("startup", on_startup) @@ -375,7 +403,7 @@ async def list_interactive_service_dependencies( def _get_prefix(service_type: ServiceType) -> str: - return f"{config.SIMCORE_SERVICES_PREFIX}/{service_type.value}/" + return f"{DIRECTOR_SIMCORE_SERVICES_PREFIX}/{service_type.value}/" def get_service_first_name(image_key: str) -> str: @@ -430,18 +458,19 @@ async def get_service_extras( ) -> dict[str, Any]: # check physical node requirements # all nodes require "CPU" + app_settings = get_application_settings(app) result = { "node_requirements": { - "CPU": config.DEFAULT_MAX_NANO_CPUS / 1.0e09, - "RAM": config.DEFAULT_MAX_MEMORY, + "CPU": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS / 1.0e09, + "RAM": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, } } labels, _ = await get_image_labels(app, image_key, image_tag) logger.debug("Compiling service extras from labels %s", pformat(labels)) - if config.SERVICE_RUNTIME_SETTINGS in labels: - service_settings = json.loads(labels[config.SERVICE_RUNTIME_SETTINGS]) + if SERVICE_RUNTIME_SETTINGS in 
labels: + service_settings = json.loads(labels[SERVICE_RUNTIME_SETTINGS]) for entry in service_settings: entry_name = entry.get("name", "").lower() entry_value = entry.get("value") @@ -455,13 +484,13 @@ async def get_service_extras( result["node_requirements"]["CPU"] = ( float(res_limit.get("NanoCPUs", 0)) or float(res_reservation.get("NanoCPUs", 0)) - or config.DEFAULT_MAX_NANO_CPUS + or app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS ) / 1.0e09 # RAM result["node_requirements"]["RAM"] = ( res_limit.get("MemoryBytes", 0) or res_reservation.get("MemoryBytes", 0) - or config.DEFAULT_MAX_MEMORY + or app_settings.DIRECTOR_DEFAULT_MAX_MEMORY ) else: invalid_with_msg = f"invalid type for resource [{entry_value}]" @@ -500,7 +529,7 @@ async def get_service_extras( result.update( { sl: labels[dl] - for dl, sl in config.ORG_LABELS_TO_SCHEMA_LABELS.items() + for dl, sl in ORG_LABELS_TO_SCHEMA_LABELS.items() if dl in labels } ) From 315bd23320c1b41a6878965b7b253f44e3b4d451 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:17:00 +0100 Subject: [PATCH 089/201] removed old main --- .../src/simcore_service_director/main_old.py | 41 ------------------- 1 file changed, 41 deletions(-) delete mode 100644 services/director/src/simcore_service_director/main_old.py diff --git a/services/director/src/simcore_service_director/main_old.py b/services/director/src/simcore_service_director/main_old.py deleted file mode 100644 index 39a6c0dfc38..00000000000 --- a/services/director/src/simcore_service_director/main_old.py +++ /dev/null @@ -1,41 +0,0 @@ -import logging - -from aiohttp import web - -# NOTE: notice that servicelib is frozen to c8669fb52659b684514fefa4f3b4599f57f276a0 -# pylint: disable=no-name-in-module -from servicelib.client_session import persistent_client_session -from simcore_service_director import registry_cache_task, resources -from simcore_service_director.monitoring import setup_app_monitoring -from 
simcore_service_director.rest import routing - -from .registry_proxy import setup_registry - -log = logging.getLogger(__name__) - - -def setup_app() -> web.Application: - api_spec_path = resources.get_path(resources.RESOURCE_OPEN_API) - app = routing.create_web_app(api_spec_path.parent, api_spec_path.name) - - # NOTE: ensure client session is context is run first, then any further get_client_sesions will be correctly closed - app.cleanup_ctx.append(persistent_client_session) - app.cleanup_ctx.append(setup_registry) - - registry_cache_task.setup(app) - - setup_app_monitoring(app, "simcore_service_director") - - # NOTE: removed tracing from director. Users old version of servicelib and - # in any case this service will be completely replaced - - return app - - -def main() -> None: - app = setup_app() - web.run_app(app, port=8080) - - -if __name__ == "__main__": - main() From cd664746280c3d51c4be1a6375c810aa2d63e767 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:17:27 +0100 Subject: [PATCH 090/201] removed old main --- services/director/src/simcore_service_director/__main__.py | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 services/director/src/simcore_service_director/__main__.py diff --git a/services/director/src/simcore_service_director/__main__.py b/services/director/src/simcore_service_director/__main__.py deleted file mode 100644 index 73227b1c129..00000000000 --- a/services/director/src/simcore_service_director/__main__.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python3 - -from .main import main - -main() From e7cc9732c1a0d62cf47c547643638da1efef7027 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:20:50 +0100 Subject: [PATCH 091/201] removed config --- .../src/simcore_service_director/config.py | 169 ------------------ 1 file changed, 169 deletions(-) delete mode 100644 
services/director/src/simcore_service_director/config.py diff --git a/services/director/src/simcore_service_director/config.py b/services/director/src/simcore_service_director/config.py deleted file mode 100644 index 090a506adb0..00000000000 --- a/services/director/src/simcore_service_director/config.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Director service configuration -""" - -import json -import logging -import os -import warnings -from distutils.util import strtobool - -from servicelib.aiohttp.application_keys import ( # pylint: disable=no-name-in-module - APP_CLIENT_SESSION_KEY, -) - -LOGLEVEL_STR = os.environ.get("LOGLEVEL", "WARNING").upper() -log_level = getattr(logging, LOGLEVEL_STR) -logging.basicConfig( - level=log_level, - format="%(levelname)s:%(name)s-%(lineno)d: %(message)s", -) -logging.root.setLevel(log_level) - -# TODO: debug mode is define by the LOG-LEVEL and not the other way around. I leave it like that for the moment ... -DEBUG_MODE = log_level == logging.DEBUG - -API_VERSION: str = "v0" -API_ROOT: str = "api" - - -def _from_env_with_default(env: str, python_type, default): - env_value = python_type(os.environ.get(env, default)) - - return default if env_value <= 0 else env_value - - -# NOTE: these settings must be in sync with settings-library: comp_services.py (since the director is frozen) -DEFAULT_MAX_NANO_CPUS: int = _from_env_with_default( - "DEFAULT_MAX_NANO_CPUS", int, 1 * pow(10, 9) -) -DEFAULT_MAX_MEMORY: int = _from_env_with_default( - "DEFAULT_MAX_MEMORY", int, 2 * pow(1024, 3) -) # 2 GiB - -SERVICE_RUNTIME_SETTINGS: str = "simcore.service.settings" -SERVICE_REVERSE_PROXY_SETTINGS: str = "simcore.service.reverse-proxy-settings" -SERVICE_RUNTIME_BOOTSETTINGS: str = "simcore.service.bootsettings" - -ORG_LABELS_TO_SCHEMA_LABELS = { - "org.label-schema.build-date": "build_date", - "org.label-schema.vcs-ref": "vcs_ref", - "org.label-schema.vcs-url": "vcs_url", -} - -DIRECTOR_REGISTRY_CACHING: bool = strtobool( - 
os.environ.get("DIRECTOR_REGISTRY_CACHING", "True") -) -DIRECTOR_REGISTRY_CACHING_TTL: int = int( - os.environ.get("DIRECTOR_REGISTRY_CACHING_TTL", 15 * 60) -) - -DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str = os.environ.get( - "DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS", "" -) - - -def _parse_placement_substitutions() -> dict[str, str]: - str_env_var: str = os.environ.get( - "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS", "{}" - ) - result: dict[str, str] = json.loads(str_env_var) - - if len(result) > 0: - warnings.warn( # noqa: B028 - "Generic resources will be replaced by the following " - f"placement constraints {result}. This is a workaround " - "for https://github.com/moby/swarmkit/pull/3162", - UserWarning, - ) - if len(result) != len(set(result.values())): - msg = f"Dictionary values must be unique, provided: {result}" - raise ValueError(msg) - - return result - - -DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ - str, str -] = _parse_placement_substitutions() - -# for passing self-signed certificate to spawned services -DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_SECRET_ID", "" -) -DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME", "" -) -DIRECTOR_SELF_SIGNED_SSL_FILENAME: str = os.environ.get( - "DIRECTOR_SELF_SIGNED_SSL_FILENAME", "" -) - -DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = int( - os.environ.get("DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS", 10) -) -DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = int( - os.environ.get("DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S", 12) -) - -DIRECTOR_SERVICES_STATE_MONITOR_S: int = int( - os.environ.get("DIRECTOR_SERVICES_STATE_MONITOR_S", 8) -) - -TRAEFIK_SIMCORE_ZONE: str = os.environ.get( - "TRAEFIK_SIMCORE_ZONE", "internal_simcore_stack" -) -APP_REGISTRY_CACHE_DATA_KEY: str = __name__ + "_registry_cache_data" - -REGISTRY_AUTH: bool = strtobool(os.environ.get("REGISTRY_AUTH", 
"False")) -REGISTRY_USER: str = os.environ.get("REGISTRY_USER", "") -REGISTRY_PW: str = os.environ.get("REGISTRY_PW", "") -REGISTRY_URL: str = os.environ.get("REGISTRY_URL", "") -REGISTRY_PATH: str = os.environ.get("REGISTRY_PATH", None) or os.environ.get( - "REGISTRY_URL", "" -) # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine -REGISTRY_SSL: bool = strtobool(os.environ.get("REGISTRY_SSL", "True")) - -EXTRA_HOSTS_SUFFIX: str = os.environ.get("EXTRA_HOSTS_SUFFIX", "undefined") - -# these are the envs passed to the dynamic services by default -SERVICES_DEFAULT_ENVS: dict[str, str] = { - "POSTGRES_ENDPOINT": os.environ.get( - "POSTGRES_ENDPOINT", "undefined postgres endpoint" - ), - "POSTGRES_USER": os.environ.get("POSTGRES_USER", "undefined postgres user"), - "POSTGRES_PASSWORD": os.environ.get( - "POSTGRES_PASSWORD", "undefined postgres password" - ), - "POSTGRES_DB": os.environ.get("POSTGRES_DB", "undefined postgres db"), - "STORAGE_ENDPOINT": os.environ.get( - "STORAGE_ENDPOINT", "undefined storage endpoint" - ), -} - -# some services need to know the published host to be functional (paraview) -# TODO: please review if needed -PUBLISHED_HOST_NAME: str = os.environ.get("PUBLISHED_HOST_NAME", "") - -SWARM_STACK_NAME: str = os.environ.get("SWARM_STACK_NAME", "undefined-please-check") - -# used when in devel mode vs release mode -NODE_SCHEMA_LOCATION: str = os.environ.get( - "NODE_SCHEMA_LOCATION", f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json" -) -# used to find the right network name -SIMCORE_SERVICES_NETWORK_NAME: str | None = os.environ.get( - "SIMCORE_SERVICES_NETWORK_NAME" -) -# useful when developing with an alternative registry namespace -SIMCORE_SERVICES_PREFIX: str = os.environ.get( - "SIMCORE_SERVICES_PREFIX", "simcore/services" -) - -# monitoring -# NOTE: keep disabled for unit-testing otherwise mocks will not hold -MONITORING_ENABLED: bool = 
strtobool(os.environ.get("MONITORING_ENABLED", "False")) - -# resources: not taken from servicelib.resources since the director uses a fixed hash of that library -CPU_RESOURCE_LIMIT_KEY = "SIMCORE_NANO_CPUS_LIMIT" -MEM_RESOURCE_LIMIT_KEY = "SIMCORE_MEMORY_BYTES_LIMIT" - -__all__ = ["APP_CLIENT_SESSION_KEY"] From f559aac68358955247209b875da7fee424e5d938 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:23:21 +0100 Subject: [PATCH 092/201] removed old pylint --- .../director/src/simcore_service_director/docker_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/src/simcore_service_director/docker_utils.py b/services/director/src/simcore_service_director/docker_utils.py index 0c6b8a1a363..7c1a832141a 100644 --- a/services/director/src/simcore_service_director/docker_utils.py +++ b/services/director/src/simcore_service_director/docker_utils.py @@ -20,18 +20,18 @@ async def docker_client() -> AsyncIterator[aiodocker.docker.Docker]: async def swarm_get_number_nodes() -> int: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list() return len(nodes) async def swarm_has_manager_nodes() -> bool: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list(filters={"role": "manager"}) return bool(nodes) async def swarm_has_worker_nodes() -> bool: - async with docker_client() as client: # pylint: disable=not-async-context-manager + async with docker_client() as client: nodes = await client.nodes.list(filters={"role": "worker"}) return bool(nodes) From 65511dc173bd244bac489ce5b6810b4dddb73f51 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:24:08 +0100 Subject: [PATCH 093/201] last one today --- 
services/director/src/simcore_service_director/system_utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/system_utils.py b/services/director/src/simcore_service_director/system_utils.py index cc3ee25c114..1e6a1a65b7f 100644 --- a/services/director/src/simcore_service_director/system_utils.py +++ b/services/director/src/simcore_service_director/system_utils.py @@ -1,8 +1,7 @@ from pathlib import Path -from typing import List -def get_system_extra_hosts_raw(extra_host_domain: str) -> List[str]: +def get_system_extra_hosts_raw(extra_host_domain: str) -> list[str]: extra_hosts = [] hosts_path = Path("/etc/hosts") if hosts_path.exists() and extra_host_domain != "undefined": From d33d94ed8cd531c4ed4967dd6ed4a15fb8f1590b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 08:34:38 +0100 Subject: [PATCH 094/201] missing return types --- .../api/rest/_running_interactive_services.py | 4 ++-- .../api/rest/_service_extras.py | 2 +- .../cache_request_decorator.py | 15 +++++++++++---- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index 29a58b80fba..b3800c639c7 100644 --- a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -23,7 +23,7 @@ async def list_running_services( the_app: Annotated[FastAPI, Depends(get_app)], user_id: UserID | None, project_id: ProjectID | None, -): +) -> Envelope[list[dict[str, Any]]]: log.debug( "Client does list_running_services request user_id %s, project_id %s", user_id, @@ -131,7 +131,7 @@ async def stop_service( the_app: Annotated[FastAPI, Depends(get_app)], service_uuid: UUID, save_state: 
bool = True, -): +) -> None: log.debug( "Client does stop_service with service_uuid %s", service_uuid, diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index f301a74a429..cfcc19e221e 100644 --- a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -18,7 +18,7 @@ async def list_service_extras( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -): +) -> Envelope[dict[str, Any]]: log.debug( "Client does service_extras_by_key_version_get request with service_key %s, service_version %s", service_key, diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py index 67844911d3f..9416a27cd46 100644 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ b/services/director/src/simcore_service_director/cache_request_decorator.py @@ -1,16 +1,23 @@ +from collections.abc import Awaitable, Callable from functools import wraps -from typing import Coroutine +from typing import Any from fastapi import FastAPI -from simcore_service_director import config +from .core.settings import get_application_settings -def cache_requests(func: Coroutine, *, no_cache: bool = False): + +def cache_requests( + func: Callable[..., Awaitable[tuple[dict[str, Any], dict[str, Any]]]], + *, + no_cache: bool = False, +) -> Callable[..., Awaitable[tuple[dict[str, Any], dict[str, Any]]]]: @wraps(func) async def wrapped( app: FastAPI, url: str, method: str, *args, **kwargs ) -> tuple[dict, dict]: - is_cache_enabled = config.DIRECTOR_REGISTRY_CACHING and method == "GET" + app_settings = get_application_settings(app) + is_cache_enabled = app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET" cache_key = 
f"{url}:{method}" if is_cache_enabled and not no_cache: cache_data = app.state.registry_cache From 0076e7f9edf665eae1a0412542974c0364014b02 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 08:45:32 +0100 Subject: [PATCH 095/201] added instrumetnation basics --- .../instrumentation.py | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 services/director/src/simcore_service_director/instrumentation.py diff --git a/services/director/src/simcore_service_director/instrumentation.py b/services/director/src/simcore_service_director/instrumentation.py new file mode 100644 index 00000000000..08b9693f524 --- /dev/null +++ b/services/director/src/simcore_service_director/instrumentation.py @@ -0,0 +1,44 @@ +from fastapi import FastAPI +from servicelib.fastapi.prometheus_instrumentation import ( + setup_prometheus_instrumentation, +) + +from .core.settings import get_application_settings + + +def setup(app: FastAPI) -> None: + app_settings = get_application_settings(app) + if not app_settings.DIRECTOR_MONITORING_ENABLED: + return + + # NOTE: this must be setup before application startup + instrumentator = setup_prometheus_instrumentation(app) + + async def on_startup() -> None: + # metrics_subsystem = ( + # "dynamic" if app_settings.AUTOSCALING_NODES_MONITORING else "computational" + # ) + # app.state.instrumentation = ( + # AutoscalingInstrumentation( # pylint: disable=unexpected-keyword-arg + # registry=instrumentator.registry, subsystem=metrics_subsystem + # ) + # ) + ... + + async def on_shutdown() -> None: + ... + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +# def get_instrumentation(app: FastAPI) -> AutoscalingInstrumentation: +# if not app.state.instrumentation: +# raise ConfigurationError( +# msg="Instrumentation not setup. Please check the configuration." 
+# ) +# return cast(AutoscalingInstrumentation, app.state.instrumentation) + + +def has_instrumentation(app: FastAPI) -> bool: + return hasattr(app.state, "instrumentation") From 611f799db4c2c76a595a11681ced04c37bebabd7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 08:46:30 +0100 Subject: [PATCH 096/201] connected instrumentation --- .../director/src/simcore_service_director/core/application.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 7c5ffbcfb0a..30764ea1207 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -14,7 +14,7 @@ APP_STARTED_BANNER_MSG, ) from ..api.rest.routes import setup_api_routes -from ..monitoring import setup_app_monitoring +from ..instrumentation import setup as setup_instrumentation from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings @@ -57,7 +57,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: app.cleanup_ctx.append(persistent_client_session) setup_registry(app) registry_cache_task.setup(app) - setup_app_monitoring(app, "simcore_service_director") + setup_instrumentation(app) # ERROR HANDLERS From 2267f18e1616aea26aebdb3ef21ca70099280f01 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 08:51:25 +0100 Subject: [PATCH 097/201] added client session --- .../client_session.py | 41 +++++++++++++++++++ .../core/application.py | 4 +- 2 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 services/director/src/simcore_service_director/client_session.py diff --git a/services/director/src/simcore_service_director/client_session.py 
b/services/director/src/simcore_service_director/client_session.py new file mode 100644 index 00000000000..74647f13822 --- /dev/null +++ b/services/director/src/simcore_service_director/client_session.py @@ -0,0 +1,41 @@ +from aiohttp import ClientSession, ClientTimeout +from fastapi import FastAPI +from models_library.utils.json_serialization import json_dumps +from servicelib.utils import ( + get_http_client_request_aiohttp_connect_timeout, + get_http_client_request_aiohttp_sock_connect_timeout, + get_http_client_request_total_timeout, +) + + +def setup_client_session(app: FastAPI) -> None: + async def on_startup() -> None: + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/4628 + + # ANE: it is important to have fast connection handshakes + # also requests should be as fast as possible + # some services are not that fast to reply + timeout_settings = ClientTimeout( + total=get_http_client_request_total_timeout(), + connect=get_http_client_request_aiohttp_connect_timeout(), + sock_connect=get_http_client_request_aiohttp_sock_connect_timeout(), + ) + session = ClientSession( + timeout=timeout_settings, + json_serialize=json_dumps, + ) + app.state.aiohttp_client_session = session + + async def on_shutdown() -> None: + session = app.state.aiohttp_client_session + assert isinstance(session, ClientSession) # nosec + await session.close() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) + + +def get_client_session(app: FastAPI) -> ClientSession: + session = app.state.aiohttp_client_session + assert isinstance(session, ClientSession) # nosec + return session diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 30764ea1207..656437bef02 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -2,7 +2,6 @@ from 
typing import Final from fastapi import FastAPI -from servicelib.client_session import persistent_client_session from servicelib.fastapi.tracing import setup_tracing from .. import registry_cache_task @@ -14,6 +13,7 @@ APP_STARTED_BANNER_MSG, ) from ..api.rest.routes import setup_api_routes +from ..client_session import setup_client_session from ..instrumentation import setup as setup_instrumentation from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings @@ -54,7 +54,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: setup_tracing(app, app.state.settings.DIRECTOR_TRACING, APP_NAME) # replace by httpx client - app.cleanup_ctx.append(persistent_client_session) + setup_client_session(app) setup_registry(app) registry_cache_task.setup(app) setup_instrumentation(app) From a400efe693f6fafd02f32156d2d1b1960f9174bc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:01:47 +0100 Subject: [PATCH 098/201] diverse --- .../cache_request_decorator.py | 8 ++++---- .../src/simcore_service_director/producer.py | 6 ++++-- .../registry_cache_task.py | 11 +++++++---- .../simcore_service_director/registry_proxy.py | 15 ++++++++------- .../src/simcore_service_director/system_utils.py | 9 ++++++--- .../src/simcore_service_director/utils.py | 10 +++++----- 6 files changed, 34 insertions(+), 25 deletions(-) diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py index 9416a27cd46..5034e54d011 100644 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ b/services/director/src/simcore_service_director/cache_request_decorator.py @@ -1,4 +1,4 @@ -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Mapping from functools import wraps from typing import Any @@ -8,14 +8,14 @@ def cache_requests( - func: 
Callable[..., Awaitable[tuple[dict[str, Any], dict[str, Any]]]], + func: Callable[..., Awaitable[tuple[dict[str, Any], Mapping[str, Any]]]], *, no_cache: bool = False, -) -> Callable[..., Awaitable[tuple[dict[str, Any], dict[str, Any]]]]: +) -> Callable[..., Awaitable[tuple[dict[str, Any], Mapping[str, Any]]]]: @wraps(func) async def wrapped( app: FastAPI, url: str, method: str, *args, **kwargs - ) -> tuple[dict, dict]: + ) -> tuple[dict, Mapping]: app_settings = get_application_settings(app) is_cache_enabled = app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET" cache_key = f"{url}:{method}" diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 5fe7477affa..71753de3d06 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -22,6 +22,7 @@ from fastapi import FastAPI from servicelib.async_utils import run_sequentially_in_context from servicelib.monitor_services import service_started, service_stopped +from simcore_service_director.utils import parse_as_datetime from tenacity import retry from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_attempt @@ -663,8 +664,9 @@ async def _get_service_state( elif task_state in ("running"): now = arrow.utcnow().datetime # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' - last_task["Status"]["Timestamp"] - task_state_update_time = arrow.get(last_task["Status"]["Timestamp"]).datetime + task_state_update_time = parse_as_datetime( + last_task["Status"]["Timestamp"], default=now + ) time_since_running = now - task_state_update_time log.debug("Now is %s, time since running mode is %s", now, time_since_running) diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py index 
100152dfaad..1613961748e 100644 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ b/services/director/src/simcore_service_director/registry_cache_task.py @@ -4,8 +4,8 @@ from fastapi import FastAPI from servicelib.utils import logged_gather -from . import config, exceptions, registry_proxy -from .core.settings import ApplicationSettings +from . import exceptions, registry_proxy +from .core.settings import ApplicationSettings, get_application_settings _logger = logging.getLogger(__name__) @@ -13,6 +13,7 @@ async def registry_caching_task(app: FastAPI) -> None: + app_settings = get_application_settings(app) try: _logger.info("%s: initializing cache...", TASK_NAME) @@ -50,9 +51,11 @@ async def registry_caching_task(app: FastAPI) -> None: _logger.info( "cache refreshed %s: sleeping for %ss...", TASK_NAME, - config.DIRECTOR_REGISTRY_CACHING_TTL, + app_settings.DIRECTOR_REGISTRY_CACHING_TTL, + ) + await asyncio.sleep( + app_settings.DIRECTOR_REGISTRY_CACHING_TTL.total_seconds() ) - await asyncio.sleep(config.DIRECTOR_REGISTRY_CACHING_TTL) except asyncio.CancelledError: _logger.info("%s: cancelling task...", TASK_NAME) except Exception: # pylint: disable=broad-except diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 977bc998ccf..9f6042d738b 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -5,7 +5,7 @@ import re from http import HTTPStatus from pprint import pformat -from typing import Any +from typing import Any, Mapping from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout @@ -18,6 +18,7 @@ from . 
import exceptions from .cache_request_decorator import cache_requests +from .client_session import get_client_session from .constants import ( DIRECTOR_SIMCORE_SERVICES_PREFIX, ORG_LABELS_TO_SCHEMA_LABELS, @@ -45,7 +46,7 @@ class ServiceType(enum.Enum): async def _basic_auth_registry_request( app: FastAPI, path: str, method: str, **session_kwargs -) -> tuple[dict, dict]: +) -> tuple[dict, Mapping]: app_settings = get_application_settings(app) if not app_settings.DIRECTOR_REGISTRY.REGISTRY_URL: msg = "URL to registry is not defined" @@ -57,7 +58,7 @@ async def _basic_auth_registry_request( logger.debug("Requesting registry using %s", url) # try the registry with basic authentication first, spare 1 call resp_data: dict = {} - resp_headers: dict = {} + resp_headers: Mapping = {} auth = ( BasicAuth( login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, @@ -69,7 +70,7 @@ async def _basic_auth_registry_request( else None ) - session = app[APP_CLIENT_SESSION_KEY] + session = get_client_session(app) try: async with session.request( method.lower(), url, auth=auth, **session_kwargs @@ -112,10 +113,10 @@ async def _auth_registry_request( app_settings: ApplicationSettings, url: URL, method: str, - auth_headers: dict, + auth_headers: Mapping, session: ClientSession, **kwargs, -) -> tuple[dict, dict]: +) -> tuple[dict, Mapping]: if ( not app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH or not app_settings.DIRECTOR_REGISTRY.REGISTRY_USER @@ -196,7 +197,7 @@ async def registry_request( method: str = "GET", no_cache: bool = False, **session_kwargs, -) -> tuple[dict, dict]: +) -> tuple[dict, Mapping]: logger.debug( "Request to registry: path=%s, method=%s. 
no_cache=%s", path, method, no_cache ) diff --git a/services/director/src/simcore_service_director/system_utils.py b/services/director/src/simcore_service_director/system_utils.py index 1e6a1a65b7f..ce666108dd2 100644 --- a/services/director/src/simcore_service_director/system_utils.py +++ b/services/director/src/simcore_service_director/system_utils.py @@ -6,7 +6,10 @@ def get_system_extra_hosts_raw(extra_host_domain: str) -> list[str]: hosts_path = Path("/etc/hosts") if hosts_path.exists() and extra_host_domain != "undefined": with hosts_path.open() as hosts: - for line in hosts: - if extra_host_domain in line: - extra_hosts.append(line.strip().replace("\t", " ")) + extra_hosts = [ + line.strip().replace("\t", " ") + for line in hosts + if extra_host_domain in line + ] + return extra_hosts diff --git a/services/director/src/simcore_service_director/utils.py b/services/director/src/simcore_service_director/utils.py index a05e0bdbb39..ad07d27d1b3 100644 --- a/services/director/src/simcore_service_director/utils.py +++ b/services/director/src/simcore_service_director/utils.py @@ -1,6 +1,7 @@ import logging from datetime import datetime -from typing import Optional + +import arrow log = logging.getLogger(__name__) @@ -8,7 +9,7 @@ _MAXLEN = len("2020-10-09T12:28:14.7710") -def parse_as_datetime(timestr: str, *, default: Optional[datetime] = None) -> datetime: +def parse_as_datetime(timestr: str, *, default: datetime | None = None) -> datetime: """ default: if parsing is not possible, it returs default @@ -19,9 +20,8 @@ def parse_as_datetime(timestr: str, *, default: Optional[datetime] = None) -> da # The 099 before the Z is not clear, therefore we will truncate the last part try: - timestr = timestr.strip("Z ")[:_MAXLEN] - dt = datetime.strptime(timestr, DATETIME_FORMAT) - return dt + return arrow.get(timestr).datetime + except ValueError as err: log.debug("Failed to parse %s: %s", timestr, err) if default is not None: From cc5662bfbf6aeb82676a113064876378110017b4 Mon 
Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:30:38 +0100 Subject: [PATCH 099/201] producer --- .../simcore_service_director/core/settings.py | 1 + .../src/simcore_service_director/producer.py | 323 ++++++++++-------- .../services_common.py | 6 +- 3 files changed, 189 insertions(+), 141 deletions(-) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 259d102a5f4..28c610e91d0 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -100,6 +100,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = 10 DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = 12 + DIRECTOR_SERVICES_STATE_MONITOR_S: int = 8 DIRECTOR_TRAEFIK_SIMCORE_ZONE: str = Field( default="internal_simcore_stack", diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 71753de3d06..7d1e31cb3ae 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -1,14 +1,16 @@ import asyncio +import contextlib import json import logging import re from datetime import timedelta -from distutils.version import StrictVersion from enum import Enum from http import HTTPStatus from pprint import pformat +from typing import Final import aiodocker +import aiodocker.networks import aiohttp import arrow import tenacity @@ -18,25 +20,33 @@ ClientResponse, ClientResponseError, ClientSession, + ClientTimeout, ) from fastapi import FastAPI +from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context -from servicelib.monitor_services import service_started, service_stopped -from simcore_service_director.utils import 
parse_as_datetime + +# from servicelib.monitor_services import service_started, service_stopped +from settings_library.docker_registry import RegistrySettings from tenacity import retry from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed -from . import config, docker_utils, exceptions, registry_proxy -from .config import ( - APP_CLIENT_SESSION_KEY, +from . import docker_utils, exceptions, registry_proxy +from .client_session import get_client_session +from .constants import ( CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY, + SERVICE_REVERSE_PROXY_SETTINGS, + SERVICE_RUNTIME_BOOTSETTINGS, + SERVICE_RUNTIME_SETTINGS, ) +from .core.settings import ApplicationSettings, get_application_settings from .exceptions import ServiceStateSaveError from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw +from .utils import parse_as_datetime log = logging.getLogger(__name__) @@ -50,8 +60,11 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> dict[str, str]: - return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} +async def _create_auth(registry_settings: RegistrySettings) -> dict[str, str]: + return { + "username": registry_settings.REGISTRY_USER, + "password": registry_settings.REGISTRY_PW.get_secret_value(), + } async def _check_node_uuid_available( @@ -105,13 +118,16 @@ def _parse_mount_settings(settings: list[dict]) -> list[dict]: return mounts +_ENV_NUM_ELEMENTS: Final[int] = 2 + + def _parse_env_settings(settings: list[str]) -> dict: envs = {} for s in settings: log.debug("Retrieved env settings %s", s) if "=" in s: parts = s.split("=") - if len(parts) == 2: + if len(parts) == _ENV_NUM_ELEMENTS: envs.update({parts[0]: parts[1]}) log.debug("Parsed env settings %s", s) @@ -141,6 +157,7 @@ def _to_simcore_runtime_docker_label_key(key: str) -> str: # pylint: disable=too-many-branches async def 
_create_docker_service_params( app: FastAPI, + *, client: aiodocker.docker.Docker, service_key: str, service_tag: str, @@ -153,16 +170,18 @@ async def _create_docker_service_params( request_simcore_user_agent: str, ) -> dict: # pylint: disable=too-many-statements + app_settings = get_application_settings(app) + service_parameters_labels = await _read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_SETTINGS ) reverse_proxy_settings = await _read_service_settings( - app, service_key, service_tag, config.SERVICE_REVERSE_PROXY_SETTINGS + app, service_key, service_tag, SERVICE_REVERSE_PROXY_SETTINGS ) service_name = registry_proxy.get_service_last_names(service_key) + "_" + node_uuid log.debug("Converting labels to docker runtime parameters") container_spec = { - "Image": f"{config.REGISTRY_PATH}/{service_key}:{service_tag}", + "Image": f"{app_settings.DIRECTOR_REGISTRY.resolved_registry_url}/{service_key}:{service_tag}", "Env": { **config.SERVICES_DEFAULT_ENVS, "SIMCORE_USER_ID": user_id, @@ -171,7 +190,7 @@ async def _create_docker_service_params( "SIMCORE_NODE_BASEPATH": node_base_path or "", "SIMCORE_HOST_NAME": service_name, }, - "Hosts": get_system_extra_hosts_raw(config.EXTRA_HOSTS_SUFFIX), + "Hosts": get_system_extra_hosts_raw(app_settings.DIRECTOR_EXTRA_HOSTS_SUFFIX), "Init": True, "Labels": { _to_simcore_runtime_docker_label_key("user_id"): user_id, @@ -179,7 +198,7 @@ async def _create_docker_service_params( _to_simcore_runtime_docker_label_key("node_id"): node_uuid, _to_simcore_runtime_docker_label_key( "swarm_stack_name" - ): config.SWARM_STACK_NAME, + ): app_settings.DIRECTOR_SWARM_STACK_NAME, _to_simcore_runtime_docker_label_key( "simcore_user_agent" ): request_simcore_user_agent, @@ -193,20 +212,20 @@ async def _create_docker_service_params( } if ( - config.DIRECTOR_SELF_SIGNED_SSL_FILENAME - and config.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID - and 
config.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME + app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME + and app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID + and app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME ): # Note: this is useful for S3 client in case of self signed certificate container_spec["Env"][ "SSL_CERT_FILE" - ] = config.DIRECTOR_SELF_SIGNED_SSL_FILENAME + ] = app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME container_spec["Secrets"] = [ { - "SecretID": config.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID, - "SecretName": config.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME, + "SecretID": app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID, + "SecretName": app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME, "File": { - "Name": config.DIRECTOR_SELF_SIGNED_SSL_FILENAME, + "Name": app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME, "Mode": 444, "UID": "0", "GID": "0", @@ -216,8 +235,16 @@ async def _create_docker_service_params( # SEE https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate docker_params = { - "auth": await _create_auth() if config.REGISTRY_AUTH else {}, - "registry": config.REGISTRY_PATH if config.REGISTRY_AUTH else "", + "auth": ( + await _create_auth(app_settings.DIRECTOR_REGISTRY) + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + else {} + ), + "registry": ( + app_settings.DIRECTOR_REGISTRY.resolved_registry_url + if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH + else "" + ), "name": service_name, "task_template": { "ContainerSpec": container_spec, @@ -230,17 +257,18 @@ async def _create_docker_service_params( }, "RestartPolicy": { "Condition": "on-failure", - "Delay": config.DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S * pow(10, 6), - "MaxAttempts": config.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS, + "Delay": app_settings.DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S + * pow(10, 6), + "MaxAttempts": app_settings.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS, }, "Resources": { "Limits": { - "NanoCPUs": config.DEFAULT_MAX_NANO_CPUS, - "MemoryBytes": 
config.DEFAULT_MAX_MEMORY, + "NanoCPUs": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS, + "MemoryBytes": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, }, "Reservations": { - "NanoCPUs": config.DEFAULT_MAX_NANO_CPUS, - "MemoryBytes": config.DEFAULT_MAX_MEMORY, + "NanoCPUs": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS, + "MemoryBytes": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, }, }, }, @@ -251,7 +279,7 @@ async def _create_docker_service_params( _to_simcore_runtime_docker_label_key("node_id"): node_uuid, _to_simcore_runtime_docker_label_key( "swarm_stack_name" - ): config.SWARM_STACK_NAME, + ): app_settings.DIRECTOR_SWARM_STACK_NAME, _to_simcore_runtime_docker_label_key( "simcore_user_agent" ): request_simcore_user_agent, @@ -263,38 +291,38 @@ async def _create_docker_service_params( _to_simcore_runtime_docker_label_key("type"): ( "main" if main_service else "dependency" ), - "io.simcore.zone": f"{config.TRAEFIK_SIMCORE_ZONE}", + "io.simcore.zone": f"{app_settings.DIRECTOR_TRAEFIK_SIMCORE_ZONE}", "traefik.enable": "true" if main_service else "false", f"traefik.http.services.{service_name}.loadbalancer.server.port": "8080", f"traefik.http.routers.{service_name}.rule": f"PathPrefix(`/x/{node_uuid}`)", f"traefik.http.routers.{service_name}.entrypoints": "http", f"traefik.http.routers.{service_name}.priority": "10", - f"traefik.http.routers.{service_name}.middlewares": f"{config.SWARM_STACK_NAME}_gzip@swarm", + f"traefik.http.routers.{service_name}.middlewares": f"{app_settings.DIRECTOR_SWARM_STACK_NAME}_gzip@swarm", }, "networks": [internal_network_id] if internal_network_id else [], } - if config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: + if app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: log.debug( - "adding custom constraints %s ", config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS + "adding custom constraints %s ", + app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS, ) docker_params["task_template"]["Placement"]["Constraints"] += [ - config.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS + 
app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS ] - if reverse_proxy_settings: - # some services define strip_path:true if they need the path to be stripped away - if reverse_proxy_settings.get("strip_path"): - docker_params["labels"][ - f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" - ] = f"^/x/{node_uuid}" - docker_params["labels"][ - f"traefik.http.routers.{service_name}.middlewares" - ] += f", {service_name}_stripprefixregex" + # some services define strip_path:true if they need the path to be stripped away + if reverse_proxy_settings and reverse_proxy_settings.get("strip_path"): + docker_params["labels"][ + f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" + ] = f"^/x/{node_uuid}" + docker_params["labels"][ + f"traefik.http.routers.{service_name}.middlewares" + ] += f", {service_name}_stripprefixregex" placement_constraints_to_substitute: list[str] = [] placement_substitutions: dict[ str, str - ] = config.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS + ] = app_settings.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS for param in service_parameters_labels: _check_setting_correctness(param) @@ -374,18 +402,17 @@ async def _create_docker_service_params( ) # REST-API compatible elif param["type"] == "EndpointSpec": - if "Ports" in param["value"]: - if ( - isinstance(param["value"]["Ports"], list) - and "TargetPort" in param["value"]["Ports"][0] - ): - docker_params["labels"][ - _to_simcore_runtime_docker_label_key("port") - ] = docker_params["labels"][ - f"traefik.http.services.{service_name}.loadbalancer.server.port" - ] = str( - param["value"]["Ports"][0]["TargetPort"] - ) + if "Ports" in param["value"] and ( + isinstance(param["value"]["Ports"], list) + and "TargetPort" in param["value"]["Ports"][0] + ): + docker_params["labels"][ + _to_simcore_runtime_docker_label_key("port") + ] = docker_params["labels"][ + 
f"traefik.http.services.{service_name}.loadbalancer.server.port" + ] = str( + param["value"]["Ports"][0]["TargetPort"] + ) # placement constraints elif ( @@ -415,7 +442,7 @@ async def _create_docker_service_params( # attach the service to the swarm network dedicated to services try: - swarm_network = await _get_swarm_network(client) + swarm_network = await _get_swarm_network(client, app_settings=app_settings) swarm_network_id = swarm_network["Id"] swarm_network_name = swarm_network["Name"] docker_params["networks"].append(swarm_network_id) @@ -465,10 +492,12 @@ def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: +async def _get_swarm_network( + client: aiodocker.docker.Docker, app_settings: ApplicationSettings +) -> dict: network_name = "_default" - if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = f"{config.SIMCORE_SERVICES_NETWORK_NAME}" + if app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: + network_name = f"{app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME}" # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -525,21 +554,21 @@ async def _pass_port_to_service( port: str, service_boot_parameters_labels: dict, session: ClientSession, + app_settings: ApplicationSettings, ) -> None: for param in service_boot_parameters_labels: _check_setting_correctness(param) if param["name"] == "published_host": - # time.sleep(5) route = param["value"] log.debug( "Service needs to get published host %s:%s using route %s", - config.PUBLISHED_HOST_NAME, + app_settings.DIRECTOR_PUBLISHED_HOST_NAME, port, route, ) service_url = "http://" + service_name + "/" + route # NOSONAR query_string = { - "hostname": str(config.PUBLISHED_HOST_NAME), + "hostname": str(app_settings.DIRECTOR_PUBLISHED_HOST_NAME), "port": str(port), } log.debug("creating request %s and query %s", service_url, query_string) @@ -609,7 +638,7 @@ async def 
_remove_overlay_network_of_swarm( async def _get_service_state( - client: aiodocker.docker.Docker, service: dict + client: aiodocker.docker.Docker, service: dict, app_settings: ApplicationSettings ) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] @@ -636,12 +665,10 @@ async def _get_service_state( log.debug("%s %s", service["ID"], task_state) last_task_state = ServiceState.STARTING # default - last_task_error_msg = ( - last_task["Status"]["Err"] if "Err" in last_task["Status"] else "" - ) + last_task_error_msg = last_task["Status"].get("Err", "") if task_state in ("failed"): # check if it failed already the max number of attempts we allow for - if len(tasks) < config.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: + if len(tasks) < app_settings.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: log.debug("number of tasks: %s", len(tasks)) last_task_state = ServiceState.STARTING else: @@ -671,7 +698,7 @@ async def _get_service_state( log.debug("Now is %s, time since running mode is %s", now, time_since_running) if time_since_running > timedelta( - seconds=config.DIRECTOR_SERVICES_STATE_MONITOR_S + seconds=app_settings.DIRECTOR_SERVICES_STATE_MONITOR_S ): last_task_state = ServiceState.RUNNING else: @@ -710,7 +737,7 @@ async def _wait_until_service_running_or_failed( log.debug("Waited for service %s to start", service_name) -async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[dict]]: +async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[str]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) @@ -760,7 +787,7 @@ async def _find_service_tag( # filter incorrect chars filtered_tags_list = filter(_TAG_REGEX.search, list_of_images[service_key]) # sort them now - available_tags_list = sorted(filtered_tags_list, key=StrictVersion) + 
available_tags_list = sorted(filtered_tags_list, key=Version) # not tags available... probably an undefined service there... if not available_tags_list: raise exceptions.ServiceNotAvailableError(service_key, service_tag) @@ -780,6 +807,7 @@ async def _find_service_tag( async def _start_docker_service( app: FastAPI, + *, client: aiodocker.docker.Docker, user_id: str, project_id: str, @@ -791,18 +819,19 @@ async def _start_docker_service( internal_network_id: str | None, request_simcore_user_agent: str, ) -> dict: # pylint: disable=R0913 + app_settings = get_application_settings(app) service_parameters = await _create_docker_service_params( app, - client, - service_key, - service_tag, - main_service, - user_id, - node_uuid, - project_id, - node_base_path, - internal_network_id, - request_simcore_user_agent, + client=client, + service_key=service_key, + service_tag=service_tag, + main_service=main_service, + user_id=user_id, + node_uuid=node_uuid, + project_id=project_id, + node_base_path=node_base_path, + internal_network_id=internal_network_id, + request_simcore_user_agent=request_simcore_user_agent, ) log.debug( "Starting docker service %s:%s using parameters %s", @@ -822,23 +851,30 @@ async def _start_docker_service( # get the full info from docker service = await client.services.inspect(service["ID"]) service_name = service["Spec"]["Name"] - service_state, service_msg = await _get_service_state(client, service) + service_state, service_msg = await _get_service_state( + client, dict(service), app_settings=app_settings + ) # wait for service to start - # await _wait_until_service_running_or_failed(client, service, node_uuid) log.debug("Service %s successfully started", service_name) # the docker swarm maybe opened some random port to access the service, get the latest version of the service service = await client.services.inspect(service["ID"]) - published_port, target_port = await _get_docker_image_port_mapping(service) + published_port, target_port = await 
_get_docker_image_port_mapping( + dict(service) + ) # now pass boot parameters service_boot_parameters_labels = await _read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_BOOTSETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ) service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) if published_port: - session = app[APP_CLIENT_SESSION_KEY] + session = get_client_session(app) await _pass_port_to_service( - service_name, published_port, service_boot_parameters_labels, session + service_name, + published_port, + service_boot_parameters_labels, + session, + app_settings=app_settings, ) return { @@ -867,10 +903,8 @@ async def _start_docker_service( async def _silent_service_cleanup(app: FastAPI, node_uuid: str) -> None: - try: - await stop_service(app, node_uuid, False) - except exceptions.DirectorException: - pass + with contextlib.suppress(exceptions.DirectorException): + await stop_service(app, node_uuid=node_uuid, save_state=False) async def _create_node( @@ -905,16 +939,16 @@ async def _create_node( for service in list_of_services: service_meta_data = await _start_docker_service( app, - client, - user_id, - project_id, - service["key"], - service["tag"], - list_of_services.index(service) == 0, - node_uuid, - node_base_path, - inter_docker_network_id, - request_simcore_user_agent, + client=client, + user_id=user_id, + project_id=project_id, + service_key=service["key"], + service_tag=service["tag"], + main_service=list_of_services.index(service) == 0, + node_uuid=node_uuid, + node_base_path=node_base_path, + internal_network_id=inter_docker_network_id, + request_simcore_user_agent=request_simcore_user_agent, ) containers_meta_data.append(service_meta_data) @@ -922,15 +956,17 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: dict, + service: dict, registry_settings: RegistrySettings ) -> tuple[str, str]: service_full_name = 
str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) - if not service_full_name.startswith(config.REGISTRY_PATH): + if not service_full_name.startswith(registry_settings.resolved_registry_url): raise exceptions.DirectorException( - msg=f"Invalid service '{service_full_name}', it is missing {config.REGISTRY_PATH}" + msg=f"Invalid service '{service_full_name}', it is missing {registry_settings.resolved_registry_url}" ) - service_full_name = service_full_name[len(config.REGISTRY_PATH) :].strip("/") + service_full_name = service_full_name[ + len(registry_settings.resolved_registry_url) : + ].strip("/") service_re_match = _SERVICE_KEY_REGEX.match(service_full_name) if not service_re_match: raise exceptions.DirectorException( @@ -957,7 +993,7 @@ async def start_service( node_base_path: str, request_simcore_user_agent: str, ) -> dict: - # pylint: disable=C0103 + app_settings = get_application_settings(app) log.debug( "starting service %s:%s using uuid %s, basepath %s", service_key, @@ -989,14 +1025,16 @@ async def start_service( request_simcore_user_agent, ) node_details = containers_meta_data[0] - if config.MONITORING_ENABLED: - service_started( - app, - "undefined_user", # NOTE: to prevent high cardinality metrics this is disabled - service_key, - service_tag, - "DYNAMIC", - ) + if app_settings.DIRECTOR_MONITORING_ENABLED: + ... + # TODO: is monitoring necessary? 
+ # service_started( + # app, + # "undefined_user", # NOTE: to prevent high cardinality metrics this is disabled + # service_key, + # service_tag, + # "DYNAMIC", + # ) # we return only the info of the main service return node_details @@ -1004,17 +1042,18 @@ async def start_service( async def _get_node_details( app: FastAPI, client: aiodocker.docker.Docker, service: dict ) -> dict: + app_settings = get_application_settings(app) service_key, service_tag = await _get_service_key_version_from_docker_service( - service + service, registry_settings=app_settings.DIRECTOR_REGISTRY ) # get boot parameters results = await asyncio.gather( _read_service_settings( - app, service_key, service_tag, config.SERVICE_RUNTIME_BOOTSETTINGS + app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ), _get_service_basepath_from_docker_service(service), - _get_service_state(client, service), + _get_service_state(client, service, app_settings=app_settings), ) service_boot_parameters_labels = results[0] @@ -1051,11 +1090,12 @@ async def _get_node_details( async def get_services_details( app: FastAPI, user_id: str | None, study_id: str | None ) -> list[dict]: + app_settings = get_application_settings(app) async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = [ f"{_to_simcore_runtime_docker_label_key('type')}=main", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] if user_id: filters.append( @@ -1070,7 +1110,7 @@ async def get_services_details( ) return [ - await _get_node_details(app, client, service) + await _get_node_details(app, client, dict(service)) for service in list_running_services ] except aiodocker.exceptions.DockerError as err: @@ -1084,14 +1124,15 @@ async def get_services_details( async def get_service_details(app: FastAPI, node_uuid: str) -> dict: - async with 
docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager + app_settings = get_application_settings(app) + async with docker_utils.docker_client() as client: try: list_running_services_with_uuid = await client.services.list( filters={ "label": [ f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}", f"{_to_simcore_runtime_docker_label_key('type')}=main", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] } ) @@ -1106,7 +1147,7 @@ async def get_service_details(app: FastAPI, node_uuid: str) -> dict: ) return await _get_node_details( - app, client, list_running_services_with_uuid[0] + app, client, dict(list_running_services_with_uuid[0]) ) except aiodocker.exceptions.DockerError as err: log.exception("Error while accessing container with uuid: %s", node_uuid) @@ -1120,11 +1161,15 @@ async def get_service_details(app: FastAPI, node_uuid: str) -> dict: reraise=True, retry=retry_if_exception_type(ClientConnectionError), ) -async def _save_service_state(service_host_name: str, session: aiohttp.ClientSession): +async def _save_service_state( + service_host_name: str, session: aiohttp.ClientSession +) -> None: response: ClientResponse async with session.post( url=f"http://{service_host_name}/state", # NOSONAR - timeout=ServicesCommonSettings().director_dynamic_service_save_timeout, + timeout=ClientTimeout( + ServicesCommonSettings().director_dynamic_service_save_timeout + ), ) as response: try: response.raise_for_status() @@ -1158,7 +1203,8 @@ async def _save_service_state(service_host_name: str, session: aiohttp.ClientSes @run_sequentially_in_context(target_args=["node_uuid"]) -async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: +async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> None: + app_settings = get_application_settings(app) 
log.debug( "stopping service with node_uuid=%s, save_state=%s", node_uuid, save_state ) @@ -1170,7 +1216,7 @@ async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: filters={ "label": [ f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}", - f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={config.SWARM_STACK_NAME}", + f"{_to_simcore_runtime_docker_label_key('swarm_stack_name')}={app_settings.DIRECTOR_SWARM_STACK_NAME}", ] } ) @@ -1187,7 +1233,6 @@ async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: # save the state of the main service if it can service_details = await get_service_details(app, node_uuid) - # FIXME: the exception for the 3d-viewer shall be removed once the dy-sidecar comes in service_host_name = "{}:{}{}".format( service_details["service_host"], ( @@ -1207,7 +1252,7 @@ async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: log.debug("saving state of service %s...", service_host_name) try: await _save_service_state( - service_host_name, session=app[APP_CLIENT_SESSION_KEY] + service_host_name, session=get_client_session(app) ) except ClientResponseError as err: raise ServiceStateSaveError( @@ -1241,12 +1286,14 @@ async def stop_service(app: FastAPI, node_uuid: str, save_state: bool) -> None: await _remove_overlay_network_of_swarm(client, node_uuid) log.debug("removed network") - if config.MONITORING_ENABLED: - service_stopped( - app, - "undefined_user", - service_details["service_key"], - service_details["service_version"], - "DYNAMIC", - "SUCCESS", - ) + if app_settings.DIRECTOR_MONITORING_ENABLED: + ... + # TODO: is it necessary still? 
+ # service_stopped( + # app, + # "undefined_user", + # service_details["service_key"], + # service_details["service_version"], + # "DYNAMIC", + # "SUCCESS", + # ) diff --git a/services/director/src/simcore_service_director/services_common.py b/services/director/src/simcore_service_director/services_common.py index f1aef5ac668..b92bbd1ca80 100644 --- a/services/director/src/simcore_service_director/services_common.py +++ b/services/director/src/simcore_service_director/services_common.py @@ -11,14 +11,14 @@ class ServicesCommonSettings(BaseSettings): # set this interval to 1 hour director_dynamic_service_save_timeout: PositiveInt = Field( - _BASE_TIMEOUT_FOR_STOPPING_SERVICES, + default=_BASE_TIMEOUT_FOR_STOPPING_SERVICES, description=( "When stopping a dynamic service, if it has " "big payloads it is important to have longer timeouts." ), ) webserver_director_stop_service_timeout: PositiveInt = Field( - _BASE_TIMEOUT_FOR_STOPPING_SERVICES + 10, + default=_BASE_TIMEOUT_FOR_STOPPING_SERVICES + 10, description=( "When the webserver invokes the director API to stop " "a service which has a very long timeout, it also " @@ -26,7 +26,7 @@ class ServicesCommonSettings(BaseSettings): ), ) storage_service_upload_download_timeout: PositiveInt = Field( - 60 * 60, + default=60 * 60, description=( "When dynamic services upload and download data from storage, " "sometimes very big payloads are involved. 
In order to handle " From 76365569e619255f33f2baf913f40341db1736e7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:34:44 +0100 Subject: [PATCH 100/201] producer --- .../director/src/simcore_service_director/producer.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 7d1e31cb3ae..3a5d59cc6f5 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -180,10 +180,17 @@ async def _create_docker_service_params( ) service_name = registry_proxy.get_service_last_names(service_key) + "_" + node_uuid log.debug("Converting labels to docker runtime parameters") + service_default_envs = { + "POSTGRES_ENDPOINT": app_settings.DIRECTOR_POSTGRES.dsn, + "POSTGRES_USER": app_settings.DIRECTOR_POSTGRES.POSTGRES_USER, + "POSTGRES_PASSWORD": app_settings.DIRECTOR_POSTGRES.POSTGRES_PASSWORD.get_secret_value(), + "POSTGRES_DB": app_settings.DIRECTOR_POSTGRES.POSTGRES_DB, + "STORAGE_ENDPOINT": app_settings.STORAGE_ENDPOINT, + } container_spec = { "Image": f"{app_settings.DIRECTOR_REGISTRY.resolved_registry_url}/{service_key}:{service_tag}", "Env": { - **config.SERVICES_DEFAULT_ENVS, + **service_default_envs, "SIMCORE_USER_ID": user_id, "SIMCORE_NODE_UUID": node_uuid, "SIMCORE_PROJECT_ID": project_id, From ed2755d1eba0663d4fa3cb1090c80a7ad04cc9d3 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 10:02:43 +0100 Subject: [PATCH 101/201] changed wrong stuff --- services/director/tests/conftest.py | 65 ++++++++++++++++------------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index d09e4623d40..0d898cf00a4 100644 --- a/services/director/tests/conftest.py 
+++ b/services/director/tests/conftest.py @@ -3,7 +3,6 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -import os from collections.abc import AsyncIterator from pathlib import Path @@ -13,7 +12,6 @@ from fastapi import FastAPI from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_director import config, resources from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings @@ -59,45 +57,56 @@ def common_schemas_specs_dir(osparc_simcore_root_dir: Path) -> Path: return specs_dir -@pytest.fixture -def configure_schemas_location( - installed_package_dir: Path, common_schemas_specs_dir: Path -) -> None: - config.NODE_SCHEMA_LOCATION = str( - common_schemas_specs_dir / "node-meta-v0.0.1.json" - ) - resources.RESOURCE_NODE_SCHEMA = os.path.relpath( - config.NODE_SCHEMA_LOCATION, installed_package_dir - ) - - @pytest.fixture(scope="session") -def configure_swarm_stack_name() -> None: - config.SWARM_STACK_NAME = "test_stack" +def configure_swarm_stack_name(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: + return setenvs_from_dict(monkeypatch, envs={"SWARM_STACK_NAME": "test_stack"}) @pytest.fixture -def configure_registry_access(docker_registry: str) -> None: - config.REGISTRY_URL = docker_registry - config.REGISTRY_PATH = docker_registry - config.REGISTRY_SSL = False - config.DIRECTOR_REGISTRY_CACHING = False +def configure_registry_access( + monkeypatch: pytest.MonkeyPatch, docker_registry: str +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + envs={ + "REGISTRY_URL": docker_registry, + "REGISTRY_PATH": docker_registry, + "REGISTRY_SSL": False, + "DIRECTOR_REGISTRY_CACHING": False, + }, + ) @pytest.fixture(scope="session") -def configure_custom_registry(pytestconfig: pytest.Config) -> None: +def configure_custom_registry( + monkeypatch: pytest.MonkeyPatch, 
pytestconfig: pytest.Config +) -> EnvVarsDict: # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password - config.REGISTRY_URL = pytestconfig.getoption("registry_url") - config.REGISTRY_AUTH = True - config.REGISTRY_USER = pytestconfig.getoption("registry_user") - config.REGISTRY_PW = pytestconfig.getoption("registry_pw") - config.DIRECTOR_REGISTRY_CACHING = False + registry_url = pytestconfig.getoption("registry_url") + assert registry_url + assert isinstance(registry_url, str) + registry_user = pytestconfig.getoption("registry_user") + assert registry_user + assert isinstance(registry_user, str) + registry_pw = pytestconfig.getoption("registry_pw") + assert registry_pw + assert isinstance(registry_pw, str) + return setenvs_from_dict( + monkeypatch, + envs={ + "REGISTRY_URL": registry_url, + "REGISTRY_AUTH": True, + "REGISTRY_USER": registry_user, + "REGISTRY_PW": registry_pw, + "REGISTRY_SSL": False, + "DIRECTOR_REGISTRY_CACHING": False, + }, + ) @pytest.fixture def api_version_prefix() -> str: - assert "v0" in resources.listdir(resources.RESOURCE_OPENAPI_ROOT) return "v0" From 86a0d4e6dd13fa9d22adc8c62a6b903687af45c9 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 10:06:37 +0100 Subject: [PATCH 102/201] tests are starting now --- services/director/tests/conftest.py | 9 +++++++-- .../director/tests/fixtures/fake_services.py | 16 +++++++++++----- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 0d898cf00a4..94b34328756 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ -128,11 +128,16 @@ def app_environment( MAX_TIME_FOR_APP_TO_SHUTDOWN = 10 +@pytest.fixture +def app_settings(app_environment: EnvVarsDict) -> ApplicationSettings: + return ApplicationSettings.create_from_envs() + + @pytest.fixture async def app( - 
app_environment: EnvVarsDict, is_pdb_enabled: bool
+    app_settings: ApplicationSettings, is_pdb_enabled: bool
 ) -> AsyncIterator[FastAPI]:
-    the_test_app = create_app(settings=ApplicationSettings.create_from_envs())
+    the_test_app = create_app(settings=app_settings)
     async with LifespanManager(
         the_test_app,
         startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP,
diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/fixtures/fake_services.py
index 76785c039d0..1edb799ee9c 100644
--- a/services/director/tests/fixtures/fake_services.py
+++ b/services/director/tests/fixtures/fake_services.py
@@ -19,7 +19,7 @@
 from aiodocker import utils
 from aiodocker.docker import Docker
 from aiodocker.exceptions import DockerError
-from simcore_service_director.config import DEFAULT_MAX_MEMORY, DEFAULT_MAX_NANO_CPUS
+from simcore_service_director.core.settings import ApplicationSettings

 _logger = logging.getLogger(__name__)

@@ -115,7 +115,8 @@ async def _build_and_push_image(
     dependent_image=None,
     *,
     bad_json_format: bool = False,
-) -> ServiceInRegistryInfoDict: # pylint: disable=R0913
+    app_settings: ApplicationSettings,
+) -> ServiceInRegistryInfoDict:
     # crate image
     service_description = _create_service_description(service_type, name, tag)

@@ -176,8 +177,8 @@ async def _build_and_push_image(
     # create the typical org.label-schema labels
     service_extras = ServiceExtrasDict(
         node_requirements=NodeRequirementsDict(
-            CPU=DEFAULT_MAX_NANO_CPUS / 1e9,
-            RAM=DEFAULT_MAX_MEMORY,
+            CPU=app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS / 1e9,
+            RAM=app_settings.DIRECTOR_DEFAULT_MAX_MEMORY,
         ),
         build_date="2020-08-19T15:36:27Z",
         vcs_ref="ca180ef1",
@@ -247,7 +248,9 @@ async def __call__(


 @pytest.fixture
-def push_services(docker_registry: str) -> Iterator[PushServicesCallable]:
+def push_services(
+    docker_registry: str, app_settings: ApplicationSettings
+) -> Iterator[PushServicesCallable]:
     registry_url = docker_registry
     list_of_pushed_images_tags:
list[ServiceInRegistryInfoDict] = [] dependent_images = [] @@ -270,6 +273,7 @@ async def _build_push_images_to_docker_registry( tag="10.52.999999", dependent_image=None, bad_json_format=bad_json_format, + app_settings=app_settings, ) dependent_images.append(dependent_image) @@ -281,6 +285,7 @@ async def _build_push_images_to_docker_registry( tag=f"{version}{image_index}", dependent_image=dependent_image, bad_json_format=bad_json_format, + app_settings=app_settings, ) for image_index in range(number_of_computational_services) ] @@ -294,6 +299,7 @@ async def _build_push_images_to_docker_registry( tag=f"{version}{image_index}", dependent_image=dependent_image, bad_json_format=bad_json_format, + app_settings=app_settings, ) for image_index in range(number_of_interactive_services) ] From 07a39861a969632d99bc1c5214352aa6c001f38d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 13:58:51 +0100 Subject: [PATCH 103/201] fix types --- .../simcore_service_director/cache_request_decorator.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py index 5034e54d011..cfe37082693 100644 --- a/services/director/src/simcore_service_director/cache_request_decorator.py +++ b/services/director/src/simcore_service_director/cache_request_decorator.py @@ -1,6 +1,6 @@ from collections.abc import Awaitable, Callable, Mapping from functools import wraps -from typing import Any +from typing import Any, cast from fastapi import FastAPI @@ -16,13 +16,17 @@ def cache_requests( async def wrapped( app: FastAPI, url: str, method: str, *args, **kwargs ) -> tuple[dict, Mapping]: + assert hasattr(app.state, "registry_cache") # nosec + assert isinstance(app.state.registry_cache, dict) # nosec app_settings = get_application_settings(app) is_cache_enabled = 
app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET" cache_key = f"{url}:{method}" if is_cache_enabled and not no_cache: cache_data = app.state.registry_cache if cache_key in cache_data: - return cache_data[cache_key] + return cast( + tuple[dict[str, Any], Mapping[str, Any]], cache_data[cache_key] + ) resp_data, resp_headers = await func(app, url, method, *args, **kwargs) From efd91467811114e8694bbd600d3d6998fdaeaeee Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:23:15 +0100 Subject: [PATCH 104/201] cleanup --- .../simcore_service_director/core/settings.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 28c610e91d0..b0be8ec916b 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -19,7 +19,6 @@ from settings_library.utils_logging import MixinLoggingSettings from .._meta import API_VERSION, API_VTAG, APP_NAME -from ..constants import API_ROOT class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): @@ -119,21 +118,6 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) STORAGE_ENDPOINT: AnyUrl = Field(...) 
- # TODO: this needs some code changes - # SERVICES_DEFAULT_ENVS: dict[str, str] = { - # "POSTGRES_ENDPOINT": os.environ.get( - # "POSTGRES_ENDPOINT", "undefined postgres endpoint" - # ), - # "POSTGRES_USER": os.environ.get("POSTGRES_USER", "undefined postgres user"), - # "POSTGRES_PASSWORD": os.environ.get( - # "POSTGRES_PASSWORD", "undefined postgres password" - # ), - # "POSTGRES_DB": os.environ.get("POSTGRES_DB", "undefined postgres db"), - # "STORAGE_ENDPOINT": os.environ.get( - # "STORAGE_ENDPOINT", "undefined storage endpoint" - # ), - # } - DIRECTOR_PUBLISHED_HOST_NAME: str = Field( default="", env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] ) @@ -143,11 +127,6 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], ) - # used when in devel mode vs release mode - DIRECTOR_NODE_SCHEMA_LOCATION: str = Field( - default=f"{API_ROOT}/{API_VERSION}/schemas/node-meta-v0.0.1.json", - env=["DIRECTOR_NODE_SCHEMA_LOCATION", "NODE_SCHEMA_LOCATION"], - ) # used to find the right network name DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( default=None, From e239d0793c6e046ef0e45db01be854a133ee08fe Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:23:22 +0100 Subject: [PATCH 105/201] temp --- services/director/src/simcore_service_director/constants.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py index 22fd12f945d..291d073a70a 100644 --- a/services/director/src/simcore_service_director/constants.py +++ b/services/director/src/simcore_service_director/constants.py @@ -19,3 +19,8 @@ API_ROOT: Final[str] = "api" DIRECTOR_SIMCORE_SERVICES_PREFIX: Final[str] = "simcore/services" + + +# TO remove +# used when in devel mode vs release mode +NODE_SCHEMA_LOCATION: Final[str] = 
f"{API_ROOT}/v0/schemas/node-meta-v0.0.1.json" From c241ad7e1312e3603bc5f4409d3b2150f87e4fdf Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:23:46 +0100 Subject: [PATCH 106/201] temp --- .../src/simcore_service_director/resources.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/director/src/simcore_service_director/resources.py b/services/director/src/simcore_service_director/resources.py index d5471ce39d0..d1fd28bd513 100644 --- a/services/director/src/simcore_service_director/resources.py +++ b/services/director/src/simcore_service_director/resources.py @@ -2,12 +2,12 @@ from pathlib import Path import pkg_resources -from simcore_service_director import config +from .constants import NODE_SCHEMA_LOCATION RESOURCE_OPENAPI_ROOT: str = "api" -RESOURCE_OPEN_API: str = f"{RESOURCE_OPENAPI_ROOT}/{config.API_VERSION}/openapi.yaml" -RESOURCE_NODE_SCHEMA: str = config.NODE_SCHEMA_LOCATION +RESOURCE_OPEN_API: str = f"{RESOURCE_OPENAPI_ROOT}/v0/openapi.yaml" +RESOURCE_NODE_SCHEMA: str = NODE_SCHEMA_LOCATION """ List of pkg_resources functions *bound* to current package with the following signature @@ -27,10 +27,10 @@ def get_path(resource_name: str) -> Path: - """ Returns a path to a resource + """Returns a path to a resource - WARNING: existence of file is not guaranteed. Use resources.exists - WARNING: resource files are supposed to be used as read-only! + WARNING: existence of file is not guaranteed. Use resources.exists + WARNING: resource files are supposed to be used as read-only! 
""" resource_path = Path(pkg_resources.resource_filename(__name__, resource_name)) return resource_path From 1976f1632b9d1e044930110b8a790bcd09fbb6a4 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:25:03 +0100 Subject: [PATCH 107/201] types --- services/director/src/simcore_service_director/producer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 3a5d59cc6f5..c0d661b36f1 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -7,7 +7,7 @@ from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Final +from typing import Any, Final import aiodocker import aiodocker.networks @@ -187,7 +187,7 @@ async def _create_docker_service_params( "POSTGRES_DB": app_settings.DIRECTOR_POSTGRES.POSTGRES_DB, "STORAGE_ENDPOINT": app_settings.STORAGE_ENDPOINT, } - container_spec = { + container_spec: dict[str, Any] = { "Image": f"{app_settings.DIRECTOR_REGISTRY.resolved_registry_url}/{service_key}:{service_tag}", "Env": { **service_default_envs, @@ -241,7 +241,7 @@ async def _create_docker_service_params( ] # SEE https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate - docker_params = { + docker_params: dict[str, Any] = { "auth": ( await _create_auth(app_settings.DIRECTOR_REGISTRY) if app_settings.DIRECTOR_REGISTRY.REGISTRY_AUTH From 27a80abad62ba1fc9abc9b8fcdef5728ddf5b49d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:42:17 +0100 Subject: [PATCH 108/201] re-added monitoring --- .../instrumentation.py | 72 +++++++++++++++---- .../simcore_service_director/monitoring.py | 30 -------- .../src/simcore_service_director/producer.py | 34 ++++----- 3 files changed, 70 insertions(+), 
66 deletions(-) delete mode 100644 services/director/src/simcore_service_director/monitoring.py diff --git a/services/director/src/simcore_service_director/instrumentation.py b/services/director/src/simcore_service_director/instrumentation.py index 08b9693f524..cb63d5f35f5 100644 --- a/services/director/src/simcore_service_director/instrumentation.py +++ b/services/director/src/simcore_service_director/instrumentation.py @@ -1,10 +1,57 @@ +from dataclasses import dataclass, field +from typing import cast + from fastapi import FastAPI +from prometheus_client import CollectorRegistry, Counter from servicelib.fastapi.prometheus_instrumentation import ( setup_prometheus_instrumentation, ) +from servicelib.instrumentation import MetricsBase, get_metrics_namespace +from ._meta import APP_NAME +from .core.errors import ConfigurationError from .core.settings import get_application_settings +MONITOR_SERVICE_STARTED_LABELS: list[str] = [ + "service_key", + "service_tag", + "simcore_user_agent", +] + +MONITOR_SERVICE_STOPPED_LABELS: list[str] = [ + "service_key", + "service_tag", + "result", + "simcore_user_agent", +] + + +@dataclass(slots=True, kw_only=True) +class DirectorV0Instrumentation(MetricsBase): + registry: CollectorRegistry + + services_started: Counter = field(init=False) + services_stopped: Counter = field(init=False) + + def __post_init__(self) -> None: + self.services_started = Counter( + name="services_started_total", + documentation="Counts the services started", + labelnames=MONITOR_SERVICE_STARTED_LABELS, + namespace=get_metrics_namespace(APP_NAME), + subsystem=self.subsystem, + registry=self.registry, + ) + + self.services_stopped = Counter( + name="services_stopped_total", + documentation="Counts the services stopped", + labelnames=MONITOR_SERVICE_STOPPED_LABELS, + namespace=get_metrics_namespace(APP_NAME), + subsystem=self.subsystem, + registry=self.registry, + ) + def setup(app: FastAPI) -> None: app_settings = get_application_settings(app) @@ -15,15 
+62,10 @@ def setup(app: FastAPI) -> None: instrumentator = setup_prometheus_instrumentation(app) async def on_startup() -> None: - # metrics_subsystem = ( - # "dynamic" if app_settings.AUTOSCALING_NODES_MONITORING else "computational" - # ) - # app.state.instrumentation = ( - # AutoscalingInstrumentation( # pylint: disable=unexpected-keyword-arg - # registry=instrumentator.registry, subsystem=metrics_subsystem - # ) - # ) - ... + metrics_subsystem = "" + app.state.instrumentation = DirectorV0Instrumentation( + registry=instrumentator.registry, subsystem=metrics_subsystem + ) async def on_shutdown() -> None: ... @@ -32,12 +74,12 @@ async def on_shutdown() -> None: app.add_event_handler("shutdown", on_shutdown) -# def get_instrumentation(app: FastAPI) -> AutoscalingInstrumentation: -# if not app.state.instrumentation: -# raise ConfigurationError( -# msg="Instrumentation not setup. Please check the configuration." -# ) -# return cast(AutoscalingInstrumentation, app.state.instrumentation) +def get_instrumentation(app: FastAPI) -> DirectorV0Instrumentation: + if not app.state.instrumentation: + raise ConfigurationError( + msg="Instrumentation not setup. Please check the configuration." 
+ ) + return cast(DirectorV0Instrumentation, app.state.instrumentation) def has_instrumentation(app: FastAPI) -> bool: diff --git a/services/director/src/simcore_service_director/monitoring.py b/services/director/src/simcore_service_director/monitoring.py deleted file mode 100644 index 91fd841ec64..00000000000 --- a/services/director/src/simcore_service_director/monitoring.py +++ /dev/null @@ -1,30 +0,0 @@ -import prometheus_client -from fastapi import FastAPI -from prometheus_client import CONTENT_TYPE_LATEST -from prometheus_client.registry import CollectorRegistry -from servicelib.monitor_services import ( - add_instrumentation as add_services_instrumentation, -) -from simcore_service_director.core.settings import ApplicationSettings - -kCOLLECTOR_REGISTRY = f"{__name__}.collector_registry" - - -async def metrics_handler(request: web.Request): - # TODO: prometheus_client.generate_latest blocking! -> Consider https://github.com/claws/aioprometheus - reg = request.app[kCOLLECTOR_REGISTRY] - resp = web.Response(body=prometheus_client.generate_latest(registry=reg)) - resp.content_type = CONTENT_TYPE_LATEST - return resp - - -def setup_app_monitoring(app: FastAPI, app_name: str) -> None: - app_settings: ApplicationSettings = app.state.settings - if not app_settings.DIRECTOR_MONITORING_ENABLED: - return - # app-scope registry - app[kCOLLECTOR_REGISTRY] = reg = CollectorRegistry(auto_describe=True) - - add_services_instrumentation(app, reg, app_name) - - app.router.add_get("/metrics", metrics_handler) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index c0d661b36f1..89837cf23af 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -25,8 +25,6 @@ from fastapi import FastAPI from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context - -# from 
servicelib.monitor_services import service_started, service_stopped from settings_library.docker_registry import RegistrySettings from tenacity import retry from tenacity.retry import retry_if_exception_type @@ -44,6 +42,7 @@ ) from .core.settings import ApplicationSettings, get_application_settings from .exceptions import ServiceStateSaveError +from .instrumentation import get_instrumentation from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw from .utils import parse_as_datetime @@ -1033,15 +1032,12 @@ async def start_service( ) node_details = containers_meta_data[0] if app_settings.DIRECTOR_MONITORING_ENABLED: - ... - # TODO: is monitoring necessary? - # service_started( - # app, - # "undefined_user", # NOTE: to prevent high cardinality metrics this is disabled - # service_key, - # service_tag, - # "DYNAMIC", - # ) + get_instrumentation(app).services_started.labels( + service_key=service_key, + service_tag=service_tag, + simcore_user_agent="undefined_user", + ).inc() + # we return only the info of the main service return node_details @@ -1294,13 +1290,9 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non log.debug("removed network") if app_settings.DIRECTOR_MONITORING_ENABLED: - ... - # TODO: is it necessary still? 
- # service_stopped( - # app, - # "undefined_user", - # service_details["service_key"], - # service_details["service_version"], - # "DYNAMIC", - # "SUCCESS", - # ) + get_instrumentation(app).services_stopped.labels( + service_key=service_details["service_key"], + service_tag=service_details["service_version"], + simcore_user_agent="undefined_user", + result="SUCCESS", + ).inc() From 7c5a00e48c5012ac67400622d01538a2ebb15218 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:08:04 +0100 Subject: [PATCH 109/201] mypy happy --- .../src/simcore_service_director/producer.py | 23 +++++++++++-------- .../registry_proxy.py | 14 ++++++----- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 89837cf23af..a0b73ddf2c9 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -7,7 +7,7 @@ from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Any, Final +from typing import Any, Final, cast import aiodocker import aiodocker.networks @@ -136,9 +136,9 @@ def _parse_env_settings(settings: list[str]) -> dict: async def _read_service_settings( app: FastAPI, key: str, tag: str, settings_name: str -) -> dict: +) -> dict[str, Any] | list[Any]: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) - settings = ( + settings: dict[str, Any] | list[Any] = ( json.loads(image_labels[settings_name]) if settings_name in image_labels else {} ) @@ -317,6 +317,7 @@ async def _create_docker_service_params( ] # some services define strip_path:true if they need the path to be stripped away + assert isinstance(reverse_proxy_settings, dict) # nosec if reverse_proxy_settings and reverse_proxy_settings.get("strip_path"): docker_params["labels"][ 
f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" @@ -329,7 +330,7 @@ async def _create_docker_service_params( placement_substitutions: dict[ str, str ] = app_settings.DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS - + assert isinstance(service_parameters_labels, list) # nosec for param in service_parameters_labels: _check_setting_correctness(param) # replace %service_uuid% by the given uuid @@ -488,12 +489,15 @@ async def _create_docker_service_params( return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: +def _get_service_entrypoint( + service_boot_parameters_labels: list[dict[str, Any]] +) -> str: log.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) if param["name"] == "entry_point": log.debug("Service entrypoint is %s", param["value"]) + assert isinstance(param["value"], str) # nosec return param["value"] return "" @@ -558,7 +562,7 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: dict, + service_boot_parameters_labels: list[Any], session: ClientSession, app_settings: ApplicationSettings, ) -> None: @@ -608,7 +612,7 @@ async def _create_overlay_network_in_swarm( service_name, node_uuid, ) - return docker_network.id + return cast(str, docker_network.id) except aiodocker.exceptions.DockerError as err: log.exception("Error while creating network for service %s", service_name) msg = "Error while creating network" @@ -872,6 +876,7 @@ async def _start_docker_service( service_boot_parameters_labels = await _read_service_settings( app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ) + assert isinstance(service_boot_parameters_labels, list) # nosec service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) if published_port: session = get_client_session(app) @@ -983,8 +988,8 @@ async def 
_get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: dict) -> str: - envs_list = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] +async def _get_service_basepath_from_docker_service(service: dict[str, Any]) -> str: + envs_list: list[str] = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict = dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 9f6042d738b..34a5ce69279 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -297,20 +297,20 @@ async def get_image_digest(app: FastAPI, image: str, tag: str) -> str | None: async def get_image_labels( app: FastAPI, image: str, tag: str -) -> tuple[dict, str | None]: +) -> tuple[dict[str, str], str | None]: """Returns image labels and the image manifest digest""" logger.debug("getting image labels of %s:%s", image, tag) path = f"/v2/{image}/manifests/{tag}" request_result, headers = await registry_request(app, path) v1_compatibility_key = json.loads(request_result["history"][0]["v1Compatibility"]) - container_config = v1_compatibility_key.get( + container_config: dict[str, Any] = v1_compatibility_key.get( "container_config", v1_compatibility_key["config"] ) - labels = container_config["Labels"] + labels: dict[str, str] = container_config["Labels"] headers = headers or {} - manifest_digest = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) + manifest_digest: str | None = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) logger.debug("retrieved labels of image %s:%s", image, tag) @@ -460,7 +460,7 @@ async def get_service_extras( # check physical node requirements # all nodes require "CPU" app_settings = get_application_settings(app) - result = { + 
result: dict[str, Any] = { "node_requirements": { "CPU": app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS / 1.0e09, "RAM": app_settings.DIRECTOR_DEFAULT_MAX_MEMORY, @@ -471,7 +471,9 @@ async def get_service_extras( logger.debug("Compiling service extras from labels %s", pformat(labels)) if SERVICE_RUNTIME_SETTINGS in labels: - service_settings = json.loads(labels[SERVICE_RUNTIME_SETTINGS]) + service_settings: list[dict[str, Any]] = json.loads( + labels[SERVICE_RUNTIME_SETTINGS] + ) for entry in service_settings: entry_name = entry.get("name", "").lower() entry_value = entry.get("value") From 9108036413421a60032491a740d6de908f5eeb54 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:23:05 +0100 Subject: [PATCH 110/201] updated Dockerfile --- services/director/Dockerfile | 103 ++++++++++++++++++----------------- 1 file changed, 53 insertions(+), 50 deletions(-) diff --git a/services/director/Dockerfile b/services/director/Dockerfile index 3449ba3db80..247188f6360 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -1,6 +1,12 @@ # syntax=docker/dockerfile:1 -ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim-buster AS base + +# Define arguments in the global scope +ARG PYTHON_VERSION="3.11.9" +ARG UV_VERSION="0.4" +FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build +# we docker image is built based on debian +FROM python:${PYTHON_VERSION}-slim-bookworm AS base + # # USAGE: # cd sercices/director @@ -14,8 +20,8 @@ LABEL maintainer=sanderegg # for docker apt caching to work this needs to be added: [https://vsupalov.com/buildkit-cache-mount-dockerfile/] RUN rm -f /etc/apt/apt.conf.d/docker-clean && \ echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache -RUN --mount=type=cache,id=basecache36,target=/var/cache/apt,mode=0755,sharing=locked \ - --mount=type=cache,id=baseapt36,target=/var/lib/apt,mode=0755,sharing=locked \ +RUN 
--mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ + --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ @@ -44,22 +50,13 @@ ENV LANG=C.UTF-8 # Turns off writing .pyc files; superfluous on an ephemeral container. ENV PYTHONDONTWRITEBYTECODE=1 \ VIRTUAL_ENV=/home/scu/.venv + # Ensures that the python and pip executables used # in the image will be those from our virtualenv. ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -# environment variables -ENV REGISTRY_AUTH='' \ - REGISTRY_USER='' \ - REGISTRY_PW='' \ - REGISTRY_URL='' \ - REGISTRY_VERSION='v2' \ - PUBLISHED_HOST_NAME='' \ - SIMCORE_SERVICES_NETWORK_NAME='' \ - EXTRA_HOSTS_SUFFIX='undefined' - - -EXPOSE 8080 +EXPOSE 8000 +EXPOSE 3000 # -------------------------- Build stage ------------------- # Installs build/package management tools and third party dependencies @@ -71,36 +68,26 @@ FROM base AS build ENV SC_BUILD_TARGET=build -RUN --mount=type=cache,id=buildbasecache36,target=/var/cache/apt,mode=0755,sharing=locked \ - --mount=type=cache,id=buildbaseapt36,target=/var/lib/apt,mode=0755,sharing=locked \ +RUN --mount=type=cache,target=/var/cache/apt,mode=0755,sharing=private \ + --mount=type=cache,target=/var/lib/apt,mode=0755,sharing=private \ set -eux \ && apt-get update \ && apt-get install -y --no-install-recommends \ - build-essential \ - git + build-essential +# install UV https://docs.astral.sh/uv/guides/integration/docker/#installing-uv +COPY --from=uv_build /uv /uvx /bin/ -# NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv -RUN python -m venv "${VIRTUAL_ENV}" +# NOTE: python virtualenv is used here such that installed +# packages may be moved to production image easily by copying the venv +RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,id=pip36,mode=0755,target=/root/.cache/pip \ - pip install --upgrade \ - pip~=21.3 \ 
+RUN --mount=type=cache,target=/root/.cache/uv \ + uv pip install --upgrade \ wheel \ setuptools -# install base 3rd party dependencies (NOTE: this speeds up devel mode) -RUN \ - --mount=type=bind,source=packages,target=/build/packages,rw \ - --mount=type=bind,source=services/director,target=/build/services/director,rw \ - pip install \ - -r /build/services/director/requirements/_base.txt - -# FIXME: -# necessary to prevent duplicated files. -# Will be removed when director is refactored using cookiecutter as this will not be necessary anymore -COPY --chown=scu:scu api/specs/director/schemas/node-meta-v0.0.1.json \ - /build/services/director/src/simcore_service_director/api/v0/oas-parts/schemas/node-meta-v0.0.1.json +WORKDIR /build # --------------------------Prod-depends-only stage ------------------- # This stage is for production only dependencies that get partially wiped out afterwards (final docker image concerns) @@ -110,12 +97,18 @@ COPY --chown=scu:scu api/specs/director/schemas/node-meta-v0.0.1.json \ # FROM build AS prod-only-deps -WORKDIR /build/services/director ENV SC_BUILD_TARGET=prod-only-deps + +WORKDIR /build/services/director + RUN \ --mount=type=bind,source=packages,target=/build/packages,rw \ --mount=type=bind,source=services/director,target=/build/services/director,rw \ - pip install -r requirements/prod.txt + --mount=type=cache,target=/root/.cache/uv \ + uv pip sync \ + requirements/prod.txt \ + && uv pip list + # --------------------------Production stage ------------------- # Final cleanup up to reduce image size and startup setup @@ -128,25 +121,32 @@ FROM base AS production ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production + ENV PYTHONOPTIMIZE=TRUE +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu - # ensure home folder is read/writable for user scu RUN chown -R scu /home/scu -# bring installed package without build tools -COPY --from=prod-only-deps 
--chown=scu:scu ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Starting from clean base image, copies pre-installed virtualenv from prod-only-deps +COPY --chown=scu:scu --from=prod-only-deps ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Copies booting scripts COPY --chown=scu:scu services/director/docker services/director/docker RUN chmod +x services/director/docker/*.sh -HEALTHCHECK --interval=30s \ - --timeout=120s \ +HEALTHCHECK --interval=10s \ + --timeout=5s \ --start-period=30s \ - --retries=3 \ - CMD ["python3", "/home/scu/services/director/docker/healthcheck.py", "http://localhost:8080/v0/"] -ENTRYPOINT [ "services/director/docker/entrypoint.sh" ] -CMD ["services/director/docker/boot.sh"] + --start-interval=1s \ + --retries=5 \ + CMD ["python3", "/home/scu/services/director/docker/healthcheck.py", "http://localhost:8000/v0/"] + +ENTRYPOINT [ "/bin/sh", "services/director/docker/entrypoint.sh" ] +CMD ["/bin/sh", "services/director/docker/boot.sh"] # --------------------------Development stage ------------------- @@ -159,9 +159,12 @@ CMD ["services/director/docker/boot.sh"] # FROM build AS development -ENV SC_BUILD_TARGET=development -ENV NODE_SCHEMA_LOCATION=../../../api/specs/director/schemas/node-meta-v0.0.1.json +ENV SC_BUILD_TARGET=development \ + SC_DEVEL_MOUNT=/devel/services/director + WORKDIR /devel + RUN chown -R scu:scu "${VIRTUAL_ENV}" + ENTRYPOINT [ "/bin/sh", "services/director/docker/entrypoint.sh" ] CMD ["/bin/sh", "services/director/docker/boot.sh"] From 90d42056f0ee367345218ee0c41efd48fbb4a4c5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:33:26 +0100 Subject: [PATCH 111/201] typo --- services/director/docker/boot.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index 0d7122a6681..eba9085247c 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -23,7 +23,7 @@ if [ 
"${SC_BUILD_TARGET}" = "development" ]; then python --version | sed 's/^/ /' command -v python | sed 's/^/ /' - cd services/autoscaling + cd services/director uv pip --quiet --no-cache-dir sync requirements/dev.txt cd - uv pip list @@ -46,7 +46,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) exec sh -c " - cd services/autoscaling/src/simcore_service_director && \ + cd services/director/src/simcore_service_director && \ python -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ --host 0.0.0.0 \ --reload \ From a5b39ef893a3d75d277e4052a0d0ec413381b646 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:34:13 +0100 Subject: [PATCH 112/201] missing validation of log level --- .../director/src/simcore_service_director/core/settings.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index b0be8ec916b..5c9bd1ed9b6 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -154,6 +154,11 @@ def _validate_substitutions(cls, v): return v + @validator("DIRECTOR_LOGLEVEL", pre=True) + @classmethod + def _valid_log_level(cls, value: str) -> str: + return cls.validate_log_level(value) + def get_application_settings(app: FastAPI) -> ApplicationSettings: return cast(ApplicationSettings, app.state.settings) From 5b8145e1514154be03e2cbf8cf309aaa2210e7cb Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:34:27 +0100 Subject: [PATCH 113/201] missing debugging port --- services/docker-compose.local.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/docker-compose.local.yml 
b/services/docker-compose.local.yml index f0254834105..4327a150043 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -50,6 +50,7 @@ services: director: environment: <<: *common_environment + DIRECTOR_REMOTE_DEBUGGING_PORT : 3000 ports: - "8080" - "3004:3000" From db90448d8fa22c0565604c9782b16052e004c44a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:55:47 +0100 Subject: [PATCH 114/201] pass log_level formatting --- services/docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 45e843ad712..af1f93cbd8b 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -254,6 +254,7 @@ services: DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME} DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} EXTRA_HOSTS_SUFFIX: undefined LOGLEVEL: ${LOG_LEVEL:-WARNING} MONITORING_ENABLED: ${MONITORING_ENABLED:-True} From 51ca92bb5796906f4d93d8483fc34c3500140437 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:56:02 +0100 Subject: [PATCH 115/201] storage endpoint is a full endpoint with scheme --- .env-devel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env-devel b/.env-devel index 02f2a9d939e..a5d35828ab0 100644 --- a/.env-devel +++ b/.env-devel @@ -213,7 +213,7 @@ SMTP_USERNAME=it_doesnt_matter # STORAGE ---- BF_API_KEY=none BF_API_SECRET=none -STORAGE_ENDPOINT=storage:8080 +STORAGE_ENDPOINT=http://storage:8080 STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO STORAGE_PORT=8080 From bd9b0e60ea3ef730fd1cd4da415175b272b1e688 Mon Sep 17 00:00:00 2001 From: 
sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:56:19 +0100 Subject: [PATCH 116/201] revert --- .env-devel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env-devel b/.env-devel index a5d35828ab0..02f2a9d939e 100644 --- a/.env-devel +++ b/.env-devel @@ -213,7 +213,7 @@ SMTP_USERNAME=it_doesnt_matter # STORAGE ---- BF_API_KEY=none BF_API_SECRET=none -STORAGE_ENDPOINT=http://storage:8080 +STORAGE_ENDPOINT=storage:8080 STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO STORAGE_PORT=8080 From 649982994194be460528d77698dd15a9716f5517 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:56:48 +0100 Subject: [PATCH 117/201] LOG_LEVEL instead of log_level --- services/director/src/simcore_service_director/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 173c3e6c9c7..02636e753bf 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -12,8 +12,8 @@ _the_settings = ApplicationSettings.create_from_envs() # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) +logging.basicConfig(level=_the_settings.DIRECTOR_LOGLEVEL) +logging.root.setLevel(_the_settings.DIRECTOR_LOGLEVEL) config_all_loggers( log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, From abb9e3541d7a1b96fa409f6e76edab60613c38c8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:57:06 +0100 Subject: [PATCH 118/201] duplicate --- .../director/src/simcore_service_director/registry_cache_task.py | 1 - 1 file changed, 1 
deletion(-) diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py index 1613961748e..3e13c3f3b8d 100644 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ b/services/director/src/simcore_service_director/registry_cache_task.py @@ -74,7 +74,6 @@ async def on_startup() -> None: _logger.info("Registry caching disabled") return - app.state.registry_cache = {} app.state.registry_cache_task = asyncio.get_event_loop().create_task( registry_caching_task(app) ) From b43f5dfa9d3214be48009b5fd018bb75f491e33d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 15:57:39 +0100 Subject: [PATCH 119/201] ruff --- .../director/src/simcore_service_director/registry_proxy.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 34a5ce69279..c966edd159f 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -3,9 +3,10 @@ import json import logging import re +from collections.abc import Mapping from http import HTTPStatus from pprint import pformat -from typing import Any, Mapping +from typing import Any from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout From b1a9a11b6c5f77c2830246b08f9b8be821e8ae96 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 16:11:19 +0100 Subject: [PATCH 120/201] fix order --- .../director/src/simcore_service_director/core/application.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/core/application.py 
b/services/director/src/simcore_service_director/core/application.py index 656437bef02..89e9e2e4507 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -55,8 +55,9 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # replace by httpx client setup_client_session(app) - setup_registry(app) registry_cache_task.setup(app) + setup_registry(app) + setup_instrumentation(app) # ERROR HANDLERS From 24c59aa3ede880700410552e14e1d48593d71ffd Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 16:11:32 +0100 Subject: [PATCH 121/201] fix settings --- .../director/src/simcore_service_director/core/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 5c9bd1ed9b6..4029ef770f0 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -10,7 +10,7 @@ PortInt, VersionTag, ) -from pydantic import AnyUrl, ByteSize, Field, PositiveInt, parse_obj_as, validator +from pydantic import ByteSize, Field, PositiveInt, parse_obj_as, validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings @@ -116,7 +116,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) - STORAGE_ENDPOINT: AnyUrl = Field(...) 
+ STORAGE_ENDPOINT: str = Field(..., description="storage endpoint without scheme") DIRECTOR_PUBLISHED_HOST_NAME: str = Field( default="", env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] From f43413f6e3cc102de0a5f1e0be1dca15ac882b49 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 16:22:08 +0100 Subject: [PATCH 122/201] changed director port --- .env-devel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env-devel b/.env-devel index 02f2a9d939e..bc5164c879c 100644 --- a/.env-devel +++ b/.env-devel @@ -67,7 +67,7 @@ DASK_TLS_KEY=/home/scu/.dask/dask-key.pem DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' DIRECTOR_HOST=director -DIRECTOR_PORT=8080 +DIRECTOR_PORT=8000 DIRECTOR_REGISTRY_CACHING_TTL=900 DIRECTOR_REGISTRY_CACHING=True From f2f3aba8ddf616941a3a573ffc5fd6d5b5593b12 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:19:32 +0100 Subject: [PATCH 123/201] fixed routes and tasks --- .../api/rest/routes.py | 7 ++- .../registry_proxy.py | 49 +++++++++++-------- services/docker-compose.local.yml | 2 +- 3 files changed, 34 insertions(+), 24 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py index d00722a5b6e..13d2c6866bf 100644 --- a/services/director/src/simcore_service_director/api/rest/routes.py +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -1,12 +1,15 @@ +from typing import Final + from fastapi import APIRouter, FastAPI, HTTPException from servicelib.fastapi.exceptions_utils import ( handle_errors_as_500, http_exception_as_json_response, ) -from ..._meta import API_VTAG from . 
import _health, _running_interactive_services, _service_extras, _services +_V0_VTAG: Final[str] = "v0" + def setup_api_routes(app: FastAPI): """ @@ -16,7 +19,7 @@ def setup_api_routes(app: FastAPI): app.include_router(_health.router, tags=["operations"]) # include the rest under /vX - api_router = APIRouter(prefix=f"/{API_VTAG}") + api_router = APIRouter(prefix=f"/{_V0_VTAG}") api_router.include_router(_services.router, tags=["services"]) api_router.include_router(_service_extras.router, tags=["services"]) api_router.include_router(_running_interactive_services.router, tags=["services"]) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index c966edd159f..bec90123778 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -1,4 +1,3 @@ -import asyncio import enum import json import logging @@ -6,11 +5,12 @@ from collections.abc import Mapping from http import HTTPStatus from pprint import pformat -from typing import Any +from typing import Any, Final from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout from fastapi import FastAPI +from servicelib.utils import limited_gather from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_result @@ -31,7 +31,7 @@ NUMBER_OF_RETRIEVED_REPOS: int = 50 NUMBER_OF_RETRIEVED_TAGS: int = 50 - +_MAX_CONCURRENT_CALLS: Final[int] = 50 VERSION_REG = re.compile( r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$" ) @@ -318,7 +318,9 @@ async def get_image_labels( return (labels, manifest_digest) -async def get_image_details(app: FastAPI, image_key: str, image_tag: str) -> dict: +async def get_image_details( + app: FastAPI, image_key: str, 
image_tag: str +) -> dict[str, Any]: image_details: dict = {} labels, image_manifest_digest = await get_image_labels(app, image_key, image_tag) @@ -348,15 +350,16 @@ async def get_image_details(app: FastAPI, image_key: str, image_tag: str) -> dic return image_details -async def get_repo_details(app: FastAPI, image_key: str) -> list[dict]: - repo_details = [] +async def get_repo_details(app: FastAPI, image_key: str) -> list[dict[str, Any]]: + image_tags = await list_image_tags(app, image_key) - tasks = [get_image_details(app, image_key, tag) for tag in image_tags] - results = await asyncio.gather(*tasks) - for image_details in results: - if image_details: - repo_details.append(image_details) - return repo_details + results = await limited_gather( + *[get_image_details(app, image_key, tag) for tag in image_tags], + reraise=False, + log=logger, + limit=_MAX_CONCURRENT_CALLS, + ) + return [result for result in results if not isinstance(result, BaseException)] async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: @@ -372,15 +375,19 @@ async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: logger.debug("retrieved list of repos : %s", repos) # only list as service if it actually contains the necessary labels - tasks = [get_repo_details(app, repo) for repo in repos] - results = await asyncio.gather(*tasks, return_exceptions=True) - services = [] - for repo_details in results: - if repo_details and isinstance(repo_details, list): - services.extend(repo_details) - elif isinstance(repo_details, Exception): - logger.error("Exception occured while listing services %s", repo_details) - return services + results = await limited_gather( + *[get_repo_details(app, repo) for repo in repos], + reraise=False, + log=logger, + limit=_MAX_CONCURRENT_CALLS, + ) + + return [ + service + for repo_details in results + if isinstance(repo_details, list) + for service in repo_details + ] async def list_interactive_service_dependencies( diff 
--git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index 4327a150043..37bbb3e9b05 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -52,7 +52,7 @@ services: <<: *common_environment DIRECTOR_REMOTE_DEBUGGING_PORT : 3000 ports: - - "8080" + - "8000" - "3004:3000" director-v2: From 3748f936fc0c9570a2541c7d947fb7dbb5a82087 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:32:09 +0100 Subject: [PATCH 124/201] ensure HEAD works --- .../src/simcore_service_director/api/rest/_health.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/api/rest/_health.py b/services/director/src/simcore_service_director/api/rest/_health.py index 78659b036ae..19a00014b33 100644 --- a/services/director/src/simcore_service_director/api/rest/_health.py +++ b/services/director/src/simcore_service_director/api/rest/_health.py @@ -5,7 +5,12 @@ router = APIRouter() -@router.get("/", include_in_schema=True, response_class=PlainTextResponse) +@router.api_route( + "/", + methods=["GET", "HEAD"], + include_in_schema=False, + response_class=PlainTextResponse, +) async def health_check() -> str: # NOTE: sync url in docker/healthcheck.py with this entrypoint! 
return f"{__name__}.health_check@{arrow.utcnow().isoformat()}" From 54e617888273e91b0f98af035ff6e1cfbb81a5b8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:32:35 +0100 Subject: [PATCH 125/201] also respond with healtcheck on /v0/ --- .../director/src/simcore_service_director/api/rest/routes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/director/src/simcore_service_director/api/rest/routes.py b/services/director/src/simcore_service_director/api/rest/routes.py index 13d2c6866bf..3d789ba02ef 100644 --- a/services/director/src/simcore_service_director/api/rest/routes.py +++ b/services/director/src/simcore_service_director/api/rest/routes.py @@ -17,6 +17,7 @@ def setup_api_routes(app: FastAPI): """ app.include_router(_health.router, tags=["operations"]) + app.include_router(_health.router, tags=["operations"], prefix=f"/{_V0_VTAG}") # include the rest under /vX api_router = APIRouter(prefix=f"/{_V0_VTAG}") From eb4c4bd4ea3237ea32ccc89345024a37eb1a2bbe Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:40:03 +0100 Subject: [PATCH 126/201] spelling error --- .env-devel | 2 ++ services/director/tests/conftest.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.env-devel b/.env-devel index bc5164c879c..d2ed0dbd48b 100644 --- a/.env-devel +++ b/.env-devel @@ -65,6 +65,8 @@ DASK_TLS_CA_FILE=/home/scu/.dask/dask-crt.pem DASK_TLS_CERT=/home/scu/.dask/dask-crt.pem DASK_TLS_KEY=/home/scu/.dask/dask-key.pem +DIRECTOR_DEFAULT_MAX_MEMORY=2_147_483_648 +DIRECTOR_DEFAULT_MAX_NANO_CPUS=1_000_000_000 DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' DIRECTOR_HOST=director DIRECTOR_PORT=8000 diff --git a/services/director/tests/conftest.py b/services/director/tests/conftest.py index 94b34328756..e17db023533 100644 --- a/services/director/tests/conftest.py +++ b/services/director/tests/conftest.py @@ 
-135,9 +135,9 @@ def app_settings(app_environment: EnvVarsDict) -> ApplicationSettings: @pytest.fixture async def app( - app_setting: ApplicationSettings, is_pdb_enabled: bool + app_settings: ApplicationSettings, is_pdb_enabled: bool ) -> AsyncIterator[FastAPI]: - the_test_app = create_app(settings=app_setting) + the_test_app = create_app(settings=app_settings) async with LifespanManager( the_test_app, startup_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_STARTUP, From 5091270a85580346fb082c4d8ecbe8860b58f011 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:40:35 +0100 Subject: [PATCH 127/201] udpate envs --- .../director/src/simcore_service_director/core/settings.py | 4 ++-- services/docker-compose.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 4029ef770f0..cb6450277bf 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -73,11 +73,11 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) # migrated settings - DIRECTOR_DEFAULT_MAX_NANO_CPUS: int = Field( + DIRECTOR_DEFAULT_MAX_NANO_CPUS: PositiveInt = Field( default=1 * pow(10, 9), env=["DIRECTOR_DEFAULT_MAX_NANO_CPUS", "DEFAULT_MAX_NANO_CPUS"], ) - DIRECTOR_DEFAULT_MAX_MEMORY: int = Field( + DIRECTOR_DEFAULT_MAX_MEMORY: PositiveInt = Field( default=parse_obj_as(ByteSize, "2GiB"), env=["DIRECTOR_DEFAULT_MAX_MEMORY", "DEFAULT_MAX_MEMORY"], ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index af1f93cbd8b..e11f92cca42 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -245,8 +245,8 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: - DEFAULT_MAX_MEMORY: 
${DIRECTOR_DEFAULT_MAX_MEMORY:-0} - DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS:-0} + DIRECTOR_DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY} + DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} DIRECTOR_REGISTRY_CACHING_TTL: ${DIRECTOR_REGISTRY_CACHING_TTL} DIRECTOR_REGISTRY_CACHING: ${DIRECTOR_REGISTRY_CACHING} DIRECTOR_SELF_SIGNED_SSL_FILENAME: ${DIRECTOR_SELF_SIGNED_SSL_FILENAME} From 8f2ae4476bbc15928051ce50d7884bb000124671 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:51:02 +0100 Subject: [PATCH 128/201] some improvements --- .../registry_cache_task.py | 28 ++++++++----------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py index 3e13c3f3b8d..024ed85ce9d 100644 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ b/services/director/src/simcore_service_director/registry_cache_task.py @@ -2,7 +2,8 @@ import logging from fastapi import FastAPI -from servicelib.utils import logged_gather +from servicelib.logging_utils import log_context +from servicelib.utils import limited_gather from . 
import exceptions, registry_proxy from .core.settings import ApplicationSettings, get_application_settings @@ -15,28 +16,21 @@ async def registry_caching_task(app: FastAPI) -> None: app_settings = get_application_settings(app) try: + with log_context(_logger, logging.INFO, msg=f"{TASK_NAME}: starting"): + assert hasattr(app.state, "registry_cache") # nosec + assert isinstance(app.state.registry_cache, dict) # nosec + app.state.registry_cache.clear() - _logger.info("%s: initializing cache...", TASK_NAME) - assert hasattr(app.state, "registry_cache") # nosec - assert isinstance(app.state.registry_cache, dict) # nosec - app.state.registry_cache.clear() await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) - _logger.info("%s: initialisation completed", TASK_NAME) while True: _logger.info("%s: waking up, refreshing cache...", TASK_NAME) try: - keys = [] - refresh_tasks = [] - for key in app.state.registry_cache: - path, method = key.split(":") - _logger.debug("refresh %s:%s", method, path) - refresh_tasks.append( - registry_proxy.registry_request( - app, path, method, no_cache=True - ) - ) + refresh_tasks = [ + registry_proxy.registry_request(app, key.split(":"), no_cache=True) + for key in app.state.registry_cache + ] keys = list(app.state.registry_cache.keys()) - results = await logged_gather(*refresh_tasks) + results = await limited_gather(*refresh_tasks, log=_logger, limit=50) for key, result in zip(keys, results, strict=False): app.state.registry_cache[key] = result From a8bc7a5ba7d866946a0fca9c54d72cb5ec1a8b49 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:52:47 +0100 Subject: [PATCH 129/201] fixes test_settings --- .env-devel | 8 ++++++++ services/director/tests/test_core_settings.py | 5 ++++- services/docker-compose.yml | 6 +++--- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.env-devel b/.env-devel index d2ed0dbd48b..6e878bf5138 100644 --- a/.env-devel 
+++ b/.env-devel @@ -69,9 +69,16 @@ DIRECTOR_DEFAULT_MAX_MEMORY=2_147_483_648 DIRECTOR_DEFAULT_MAX_NANO_CPUS=1_000_000_000 DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' DIRECTOR_HOST=director +DIRECTOR_LOGLEVEL=INFO +DIRECTOR_MONITORING_ENABLED=True DIRECTOR_PORT=8000 +DIRECTOR_PUBLISHED_HOST_NAME= DIRECTOR_REGISTRY_CACHING_TTL=900 DIRECTOR_REGISTRY_CACHING=True +DIRECTOR_SELF_SIGNED_SSL_FILENAME= +DIRECTOR_SELF_SIGNED_SSL_SECRET_ID= +DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME= +DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS= EFS_USER_ID=8006 EFS_USER_NAME=efs @@ -173,6 +180,7 @@ REDIS_SECURE=false REDIS_USER=null REGISTRY_AUTH=True +REGISTRY_PATH="" REGISTRY_PW=adminadminadmin REGISTRY_SSL=True REGISTRY_URL=registry.osparc-master.speag.com diff --git a/services/director/tests/test_core_settings.py b/services/director/tests/test_core_settings.py index 24b07909702..7c0caf3129f 100644 --- a/services/director/tests/test_core_settings.py +++ b/services/director/tests/test_core_settings.py @@ -21,4 +21,7 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): assert settings == ApplicationSettings.create_from_envs() - assert app_environment["DIRECTOR_DEBUG"] == settings.DIRECTOR_DEBUG + assert ( + app_environment["DIRECTOR_DEFAULT_MAX_MEMORY"] + == settings.DIRECTOR_DEFAULT_MAX_MEMORY + ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index e11f92cca42..96ef66dd7cc 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -256,9 +256,9 @@ services: DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} EXTRA_HOSTS_SUFFIX: undefined - LOGLEVEL: ${LOG_LEVEL:-WARNING} - MONITORING_ENABLED: ${MONITORING_ENABLED:-True} - PUBLISHED_HOST_NAME: ${MACHINE_FQDN} + DIRECTOR_LOGLEVEL: ${DIRECTOR_LOGLEVEL} + DIRECTOR_MONITORING_ENABLED: ${DIRECTOR_MONITORING_ENABLED} + 
DIRECTOR_PUBLISHED_HOST_NAME: ${DIRECTOR_PUBLISHED_HOST_NAME} POSTGRES_DB: ${POSTGRES_DB} POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} POSTGRES_HOST: ${POSTGRES_HOST} From 17bbe29d0925c85bbf521389f03deae3e8f387c6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:28:51 +0100 Subject: [PATCH 130/201] regenerated reqs --- services/director/requirements/_base.in | 2 +- services/director/requirements/_base.txt | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/services/director/requirements/_base.in b/services/director/requirements/_base.in index 2c0510eb962..468bb684525 100644 --- a/services/director/requirements/_base.in +++ b/services/director/requirements/_base.in @@ -11,7 +11,7 @@ --requirement ../../../packages/service-library/requirements/_base.in --requirement ../../../packages/service-library/requirements/_fastapi.in - +aiocache aiodocker fastapi[all] httpx diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index ea607f5efec..df2ae5c98d9 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -1,7 +1,9 @@ aio-pika==9.4.3 # via -r requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 - # via -r requirements/../../../packages/service-library/requirements/_base.in + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/_base.in aiodebug==2.3.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.23.0 @@ -161,6 +163,7 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions @@ -178,11 +181,14 @@ opentelemetry-instrumentation==0.48b0 # via # 
opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests opentelemetry-instrumentation-asgi==0.48b0 # via opentelemetry-instrumentation-fastapi opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-redis==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-proto==1.27.0 @@ -199,6 +205,7 @@ opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk opentelemetry-util-http==0.48b0 @@ -370,6 +377,7 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-redis yarl==1.17.1 # via # aio-pika From 9369cb5398cd491d4e86f0f15a7e80a078294e2f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:31:16 +0100 Subject: [PATCH 131/201] replaced home made caching by aiocache --- .../cache_request_decorator.py | 42 ---------- .../core/application.py | 2 - .../registry_cache_task.py | 84 ------------------- .../registry_proxy.py | 22 ++++- 4 files changed, 20 insertions(+), 130 deletions(-) delete mode 100644 services/director/src/simcore_service_director/cache_request_decorator.py delete mode 100644 services/director/src/simcore_service_director/registry_cache_task.py diff --git a/services/director/src/simcore_service_director/cache_request_decorator.py b/services/director/src/simcore_service_director/cache_request_decorator.py deleted file mode 100644 index cfe37082693..00000000000 --- 
a/services/director/src/simcore_service_director/cache_request_decorator.py +++ /dev/null @@ -1,42 +0,0 @@ -from collections.abc import Awaitable, Callable, Mapping -from functools import wraps -from typing import Any, cast - -from fastapi import FastAPI - -from .core.settings import get_application_settings - - -def cache_requests( - func: Callable[..., Awaitable[tuple[dict[str, Any], Mapping[str, Any]]]], - *, - no_cache: bool = False, -) -> Callable[..., Awaitable[tuple[dict[str, Any], Mapping[str, Any]]]]: - @wraps(func) - async def wrapped( - app: FastAPI, url: str, method: str, *args, **kwargs - ) -> tuple[dict, Mapping]: - assert hasattr(app.state, "registry_cache") # nosec - assert isinstance(app.state.registry_cache, dict) # nosec - app_settings = get_application_settings(app) - is_cache_enabled = app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET" - cache_key = f"{url}:{method}" - if is_cache_enabled and not no_cache: - cache_data = app.state.registry_cache - if cache_key in cache_data: - return cast( - tuple[dict[str, Any], Mapping[str, Any]], cache_data[cache_key] - ) - - resp_data, resp_headers = await func(app, url, method, *args, **kwargs) - - if is_cache_enabled and not no_cache: - cache_data = app.state.registry_cache - cache_data[cache_key] = (resp_data, resp_headers) - - return (resp_data, resp_headers) - - return wrapped - - -__all__ = ["cache_requests"] diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 89e9e2e4507..c8aa7a8fc14 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -4,7 +4,6 @@ from fastapi import FastAPI from servicelib.fastapi.tracing import setup_tracing -from .. 
import registry_cache_task from .._meta import ( API_VERSION, API_VTAG, @@ -55,7 +54,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # replace by httpx client setup_client_session(app) - registry_cache_task.setup(app) setup_registry(app) setup_instrumentation(app) diff --git a/services/director/src/simcore_service_director/registry_cache_task.py b/services/director/src/simcore_service_director/registry_cache_task.py deleted file mode 100644 index 024ed85ce9d..00000000000 --- a/services/director/src/simcore_service_director/registry_cache_task.py +++ /dev/null @@ -1,84 +0,0 @@ -import asyncio -import logging - -from fastapi import FastAPI -from servicelib.logging_utils import log_context -from servicelib.utils import limited_gather - -from . import exceptions, registry_proxy -from .core.settings import ApplicationSettings, get_application_settings - -_logger = logging.getLogger(__name__) - -TASK_NAME: str = __name__ + "_registry_caching_task" - - -async def registry_caching_task(app: FastAPI) -> None: - app_settings = get_application_settings(app) - try: - with log_context(_logger, logging.INFO, msg=f"{TASK_NAME}: starting"): - assert hasattr(app.state, "registry_cache") # nosec - assert isinstance(app.state.registry_cache, dict) # nosec - app.state.registry_cache.clear() - - await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) - while True: - _logger.info("%s: waking up, refreshing cache...", TASK_NAME) - try: - refresh_tasks = [ - registry_proxy.registry_request(app, key.split(":"), no_cache=True) - for key in app.state.registry_cache - ] - keys = list(app.state.registry_cache.keys()) - results = await limited_gather(*refresh_tasks, log=_logger, limit=50) - - for key, result in zip(keys, results, strict=False): - app.state.registry_cache[key] = result - - except exceptions.DirectorException: - # if the registry is temporarily not available this might happen - _logger.exception( - "%s: exception while refreshing cache, clean 
cache...", TASK_NAME - ) - app.state.registry_cache.clear() - - _logger.info( - "cache refreshed %s: sleeping for %ss...", - TASK_NAME, - app_settings.DIRECTOR_REGISTRY_CACHING_TTL, - ) - await asyncio.sleep( - app_settings.DIRECTOR_REGISTRY_CACHING_TTL.total_seconds() - ) - except asyncio.CancelledError: - _logger.info("%s: cancelling task...", TASK_NAME) - except Exception: # pylint: disable=broad-except - _logger.exception("%s: Unhandled exception while refreshing cache", TASK_NAME) - finally: - _logger.info("%s: finished task...clearing cache...", TASK_NAME) - app.state.registry_cache.clear() - - -def setup(app: FastAPI) -> None: - async def on_startup() -> None: - app.state.registry_cache = {} - app.state.registry_cache_task = None - app_settings: ApplicationSettings = app.state.settings - if not app_settings.DIRECTOR_REGISTRY_CACHING: - _logger.info("Registry caching disabled") - return - - app.state.registry_cache_task = asyncio.get_event_loop().create_task( - registry_caching_task(app) - ) - - async def on_shutdown() -> None: - if app.state.registry_cache_task: - app.state.registry_cache_task.cancel() - await app.state.registry_cache_task - - app.add_event_handler("startup", on_startup) - app.add_event_handler("shutdown", on_shutdown) - - -__all__ = ["setup"] diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index bec90123778..ca743bff797 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -7,6 +7,7 @@ from pprint import pformat from typing import Any, Final +from aiocache import Cache, SimpleMemoryCache from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout from fastapi import FastAPI @@ -18,7 +19,6 @@ from yarl import URL from . 
import exceptions -from .cache_request_decorator import cache_requests from .client_session import get_client_session from .constants import ( DIRECTOR_SIMCORE_SERVICES_PREFIX, @@ -202,10 +202,25 @@ async def registry_request( logger.debug( "Request to registry: path=%s, method=%s. no_cache=%s", path, method, no_cache ) - return await cache_requests(_basic_auth_registry_request, no_cache=no_cache)( + cache: SimpleMemoryCache = app.state.registry_cache_memory + cache_key = f"{method}_{path}" + if not no_cache and (cached_response := await cache.get(cache_key)): + return cached_response + + app_settings = get_application_settings(app) + response, response_headers = await _basic_auth_registry_request( app, path, method, **session_kwargs ) + if not no_cache and app_settings.DIRECTOR_REGISTRY_CACHING and method == "GET": + await cache.set( + cache_key, + (response, response_headers), + ttl=app_settings.DIRECTOR_REGISTRY_CACHING_TTL.total_seconds(), + ) + + return response, response_headers + async def _is_registry_responsive(app: FastAPI) -> bool: path = "/v2/" @@ -237,6 +252,9 @@ async def wait_until_registry_responsive(app: FastAPI) -> bool: def setup(app: FastAPI) -> None: async def on_startup() -> None: + cache = Cache(Cache.MEMORY) + assert isinstance(cache, SimpleMemoryCache) # nosec + app.state.registry_cache_memory = cache await _setup_registry(app) async def on_shutdown() -> None: From 1c84f71edec69ebbfda4998197b4878905802dcc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:35:42 +0100 Subject: [PATCH 132/201] mypy happy --- .../src/simcore_service_director/registry_proxy.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index ca743bff797..1cdf0362315 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ 
b/services/director/src/simcore_service_director/registry_proxy.py @@ -5,9 +5,9 @@ from collections.abc import Mapping from http import HTTPStatus from pprint import pformat -from typing import Any, Final +from typing import Any, Final, cast -from aiocache import Cache, SimpleMemoryCache +from aiocache import Cache, SimpleMemoryCache # type: ignore[import-untyped] from aiohttp import BasicAuth, ClientSession, client_exceptions from aiohttp.client import ClientTimeout from fastapi import FastAPI @@ -205,7 +205,8 @@ async def registry_request( cache: SimpleMemoryCache = app.state.registry_cache_memory cache_key = f"{method}_{path}" if not no_cache and (cached_response := await cache.get(cache_key)): - return cached_response + assert isinstance(tuple[dict, Mapping], cached_response) # nosec + return cast(tuple[dict, Mapping], cached_response) app_settings = get_application_settings(app) response, response_headers = await _basic_auth_registry_request( From 9d08c65ee5afcc7d7da4c39bfccf1eb1202c0a11 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 22:15:56 +0100 Subject: [PATCH 133/201] fixed test --- .env-devel | 4 ++-- services/director/tests/test_core_settings.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.env-devel b/.env-devel index 78047abb758..ee47dac5be3 100644 --- a/.env-devel +++ b/.env-devel @@ -65,8 +65,8 @@ DASK_TLS_CA_FILE=/home/scu/.dask/dask-crt.pem DASK_TLS_CERT=/home/scu/.dask/dask-crt.pem DASK_TLS_KEY=/home/scu/.dask/dask-key.pem -DIRECTOR_DEFAULT_MAX_MEMORY=2_147_483_648 -DIRECTOR_DEFAULT_MAX_NANO_CPUS=1_000_000_000 +DIRECTOR_DEFAULT_MAX_MEMORY=2147483648 +DIRECTOR_DEFAULT_MAX_NANO_CPUS=1000000000 DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' DIRECTOR_HOST=director DIRECTOR_LOGLEVEL=INFO diff --git a/services/director/tests/test_core_settings.py b/services/director/tests/test_core_settings.py index 7c0caf3129f..beaa0cd3056 100644 --- 
a/services/director/tests/test_core_settings.py +++ b/services/director/tests/test_core_settings.py @@ -23,5 +23,5 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): assert ( app_environment["DIRECTOR_DEFAULT_MAX_MEMORY"] - == settings.DIRECTOR_DEFAULT_MAX_MEMORY + == f"{settings.DIRECTOR_DEFAULT_MAX_MEMORY}" ) From 54a26148f8dc75aa32da3f83ff672fc84137162f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 22:44:27 +0100 Subject: [PATCH 134/201] moved tests to unit subfolder fixed some tests, added some TODOs --- .../director/tests/test_dummy_services.py | 24 ---- .../tests/test_registry_cache_task.py | 67 ---------- .../director/tests/{ => unit}/api/conftest.py | 0 .../tests/{ => unit}/api/test_rest_health.py | 0 .../test_rest_running_interactive_services.py | 58 ++++---- .../api/test_rest_service_extras.py | 0 .../{ => unit}/api/test_rest_services.py | 0 .../director/tests/{ => unit}/conftest.py | 0 .../dummy_service_description-v1.json | 2 +- .../{ => unit}/fixtures/fake_services.py | 0 .../tests/{ => unit}/test__model_examples.py | 0 .../director/tests/{ => unit}/test_cli.py | 0 .../tests/{ => unit}/test_core_settings.py | 0 .../tests/{ => unit}/test_docker_utils.py | 0 .../tests/unit/test_dummy_services.py | 22 +++ .../tests/{ => unit}/test_json_schemas.py | 0 .../director/tests/{ => unit}/test_oas.py | 0 .../tests/{ => unit}/test_producer.py | 126 +++++++++++------- .../tests/unit/test_registry_cache_task.py | 67 ++++++++++ .../tests/{ => unit}/test_registry_proxy.py | 70 +++++----- .../director/tests/{ => unit}/test_utils.py | 0 21 files changed, 233 insertions(+), 203 deletions(-) delete mode 100644 services/director/tests/test_dummy_services.py delete mode 100644 services/director/tests/test_registry_cache_task.py rename services/director/tests/{ => unit}/api/conftest.py (100%) rename services/director/tests/{ => unit}/api/test_rest_health.py (100%) rename 
services/director/tests/{ => unit}/api/test_rest_running_interactive_services.py (85%) rename services/director/tests/{ => unit}/api/test_rest_service_extras.py (100%) rename services/director/tests/{ => unit}/api/test_rest_services.py (100%) rename services/director/tests/{ => unit}/conftest.py (100%) rename services/director/tests/{ => unit}/fixtures/dummy_service_description-v1.json (96%) rename services/director/tests/{ => unit}/fixtures/fake_services.py (100%) rename services/director/tests/{ => unit}/test__model_examples.py (100%) rename services/director/tests/{ => unit}/test_cli.py (100%) rename services/director/tests/{ => unit}/test_core_settings.py (100%) rename services/director/tests/{ => unit}/test_docker_utils.py (100%) create mode 100644 services/director/tests/unit/test_dummy_services.py rename services/director/tests/{ => unit}/test_json_schemas.py (100%) rename services/director/tests/{ => unit}/test_oas.py (100%) rename services/director/tests/{ => unit}/test_producer.py (76%) create mode 100644 services/director/tests/unit/test_registry_cache_task.py rename services/director/tests/{ => unit}/test_registry_proxy.py (81%) rename services/director/tests/{ => unit}/test_utils.py (100%) diff --git a/services/director/tests/test_dummy_services.py b/services/director/tests/test_dummy_services.py deleted file mode 100644 index 80e142c4601..00000000000 --- a/services/director/tests/test_dummy_services.py +++ /dev/null @@ -1,24 +0,0 @@ -# pylint: disable=bare-except -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-import - -import json -import logging - -from helpers import json_schema_validator - -log = logging.getLogger(__name__) - - -async def test_services_conformity(configure_schemas_location, push_services): - from simcore_service_director import resources - - services = await push_services(1, 1) - with resources.stream(resources.RESOURCE_NODE_SCHEMA) as file_pt: - service_schema = 
json.load(file_pt) - for service in services: - # validate service - json_schema_validator.validate_instance_object( - service["service_description"], service_schema - ) diff --git a/services/director/tests/test_registry_cache_task.py b/services/director/tests/test_registry_cache_task.py deleted file mode 100644 index e0272798204..00000000000 --- a/services/director/tests/test_registry_cache_task.py +++ /dev/null @@ -1,67 +0,0 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -from asyncio import sleep - -import pytest -from simcore_service_director import config, main, registry_cache_task, registry_proxy - - -@pytest.fixture -def client( - loop, - aiohttp_client, - aiohttp_unused_port, - configure_schemas_location, - configure_registry_access, -): - config.DIRECTOR_REGISTRY_CACHING = True - config.DIRECTOR_REGISTRY_CACHING_TTL = 5 - # config.DIRECTOR_REGISTRY_CACHING_TTL = 5 - app = main.setup_app() - server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} - - registry_cache_task.setup(app) - - return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - - -async def test_registry_caching_task(client, push_services): - app = client.app - assert app - - # check the task is started - assert registry_cache_task.TASK_NAME in app - # check the registry cache is empty (no calls yet) - assert registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY in app - - # check we do not get any repository - list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert not list_of_services - assert app[registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY] != {} - # create services in the registry - pushed_services = await push_services( - number_of_computational_services=1, number_of_interactive_services=1 - ) - # the services shall be updated - await sleep( - config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 - ) # NOTE: this can take some time. Sleep increased by 10%. 
- list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert len(list_of_services) == 2 - # add more - pushed_services = await push_services( - number_of_computational_services=2, - number_of_interactive_services=2, - version="2.0.", - ) - await sleep( - config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 - ) # NOTE: this sometimes takes a bit more. Sleep increased a 10%. - list_of_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert len(list_of_services) == len(pushed_services) diff --git a/services/director/tests/api/conftest.py b/services/director/tests/unit/api/conftest.py similarity index 100% rename from services/director/tests/api/conftest.py rename to services/director/tests/unit/api/conftest.py diff --git a/services/director/tests/api/test_rest_health.py b/services/director/tests/unit/api/test_rest_health.py similarity index 100% rename from services/director/tests/api/test_rest_health.py rename to services/director/tests/unit/api/test_rest_health.py diff --git a/services/director/tests/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py similarity index 85% rename from services/director/tests/api/test_rest_running_interactive_services.py rename to services/director/tests/unit/api/test_rest_running_interactive_services.py index 98c13317871..87cfd34ee61 100644 --- a/services/director/tests/api/test_rest_running_interactive_services.py +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -7,7 +7,8 @@ import httpx import pytest -from aioresponses.core import CallbackResult, aioresponses + +# from aioresponses.core import CallbackResult, aioresponses from fastapi import status @@ -179,35 +180,36 @@ async def test_running_services_post_and_delete( if save_state: query_params.update({"save_state": "true" if save_state else "false"}) - mocked_save_state_cb = mocker.MagicMock( - 
return_value=CallbackResult(status=200, payload={}) - ) - PASSTHROUGH_REQUESTS_PREFIXES = [ - "http://127.0.0.1", - "http://localhost", - "unix://", # docker engine - "ws://", # websockets - ] - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + # TODO: replace with respx?? + # mocked_save_state_cb = mocker.MagicMock( + # return_value=CallbackResult(status=200, payload={}) + # ) + # PASSTHROUGH_REQUESTS_PREFIXES = [ + # "http://127.0.0.1", + # "http://localhost", + # "unix://", # docker engine + # "ws://", # websockets + # ] + # with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- - mock.post( - f"http://{service_host}:{service_port}{service_basepath}/state", - status=200, - callback=mocked_save_state_cb, - ) - resp = await client.delete( - f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - params=query_params, - ) - if expected_save_state_call: - mocked_save_state_cb.assert_called_once() + # # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- + # mock.post( + # f"http://{service_host}:{service_port}{service_basepath}/state", + # status=200, + # callback=mocked_save_state_cb, + # ) + # resp = await client.delete( + # f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + # params=query_params, + # ) + # if expected_save_state_call: + # mocked_save_state_cb.assert_called_once() - text = resp.text - assert resp.status_code == status.HTTP_204_NO_CONTENT, text - assert resp.encoding == "application/json" - data = resp.json() - assert data is None + # text = resp.text + # assert resp.status_code == status.HTTP_204_NO_CONTENT, text + # assert resp.encoding == "application/json" + # data = resp.json() + # assert data is None async def test_running_interactive_services_list_get( diff --git 
a/services/director/tests/api/test_rest_service_extras.py b/services/director/tests/unit/api/test_rest_service_extras.py similarity index 100% rename from services/director/tests/api/test_rest_service_extras.py rename to services/director/tests/unit/api/test_rest_service_extras.py diff --git a/services/director/tests/api/test_rest_services.py b/services/director/tests/unit/api/test_rest_services.py similarity index 100% rename from services/director/tests/api/test_rest_services.py rename to services/director/tests/unit/api/test_rest_services.py diff --git a/services/director/tests/conftest.py b/services/director/tests/unit/conftest.py similarity index 100% rename from services/director/tests/conftest.py rename to services/director/tests/unit/conftest.py diff --git a/services/director/tests/fixtures/dummy_service_description-v1.json b/services/director/tests/unit/fixtures/dummy_service_description-v1.json similarity index 96% rename from services/director/tests/fixtures/dummy_service_description-v1.json rename to services/director/tests/unit/fixtures/dummy_service_description-v1.json index e7e0f4907ca..f68f21a15d6 100644 --- a/services/director/tests/fixtures/dummy_service_description-v1.json +++ b/services/director/tests/unit/fixtures/dummy_service_description-v1.json @@ -55,4 +55,4 @@ "type": "data:application/json" } } -} \ No newline at end of file +} diff --git a/services/director/tests/fixtures/fake_services.py b/services/director/tests/unit/fixtures/fake_services.py similarity index 100% rename from services/director/tests/fixtures/fake_services.py rename to services/director/tests/unit/fixtures/fake_services.py diff --git a/services/director/tests/test__model_examples.py b/services/director/tests/unit/test__model_examples.py similarity index 100% rename from services/director/tests/test__model_examples.py rename to services/director/tests/unit/test__model_examples.py diff --git a/services/director/tests/test_cli.py b/services/director/tests/unit/test_cli.py 
similarity index 100% rename from services/director/tests/test_cli.py rename to services/director/tests/unit/test_cli.py diff --git a/services/director/tests/test_core_settings.py b/services/director/tests/unit/test_core_settings.py similarity index 100% rename from services/director/tests/test_core_settings.py rename to services/director/tests/unit/test_core_settings.py diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/unit/test_docker_utils.py similarity index 100% rename from services/director/tests/test_docker_utils.py rename to services/director/tests/unit/test_docker_utils.py diff --git a/services/director/tests/unit/test_dummy_services.py b/services/director/tests/unit/test_dummy_services.py new file mode 100644 index 00000000000..255d563e6c0 --- /dev/null +++ b/services/director/tests/unit/test_dummy_services.py @@ -0,0 +1,22 @@ +# pylint: disable=bare-except +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-import + + +# from helpers import json_schema_validator + +# log = logging.getLogger(__name__) + + +# async def test_services_conformity(configure_schemas_location, push_services): +# from simcore_service_director import resources + +# services = await push_services(1, 1) +# with resources.stream(resources.RESOURCE_NODE_SCHEMA) as file_pt: +# service_schema = json.load(file_pt) +# for service in services: +# # validate service +# json_schema_validator.validate_instance_object( +# service["service_description"], service_schema +# ) diff --git a/services/director/tests/test_json_schemas.py b/services/director/tests/unit/test_json_schemas.py similarity index 100% rename from services/director/tests/test_json_schemas.py rename to services/director/tests/unit/test_json_schemas.py diff --git a/services/director/tests/test_oas.py b/services/director/tests/unit/test_oas.py similarity index 100% rename from services/director/tests/test_oas.py rename to 
services/director/tests/unit/test_oas.py diff --git a/services/director/tests/test_producer.py b/services/director/tests/unit/test_producer.py similarity index 76% rename from services/director/tests/test_producer.py rename to services/director/tests/unit/test_producer.py index 620e624a663..38dd97631da 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -8,36 +8,56 @@ import uuid from collections.abc import Callable from dataclasses import dataclass +from typing import Any, AsyncIterator, Awaitable, Iterator import docker +import docker.models.networks import pytest -from simcore_service_director import config, exceptions, producer +from fastapi import FastAPI +from models_library.projects import ProjectID +from models_library.users import UserID +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from settings_library.docker_registry import RegistrySettings +from simcore_service_director import exceptions, producer +from simcore_service_director.constants import ( + CPU_RESOURCE_LIMIT_KEY, + MEM_RESOURCE_LIMIT_KEY, +) +from simcore_service_director.core.settings import ApplicationSettings from tenacity import Retrying from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed @pytest.fixture -def ensure_service_runs_in_ci(monkeypatch): - monkeypatch.setattr(config, "DEFAULT_MAX_MEMORY", int(25 * pow(1024, 2))) - monkeypatch.setattr(config, "DEFAULT_MAX_NANO_CPUS", int(0.01 * pow(10, 9))) +def ensure_service_runs_in_ci(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + envs={ + "DEFAULT_MAX_MEMORY": int(25 * pow(1024, 2)), + "DEFAULT_MAX_NANO_CPUS": int(0.01 * pow(10, 9)), + }, + ) @pytest.fixture async def run_services( - ensure_service_runs_in_ci, - aiohttp_mock_app, - configure_registry_access, - configure_schemas_location, + ensure_service_runs_in_ci: EnvVarsDict, 
+ configure_registry_access: EnvVarsDict, + configure_schemas_location: EnvVarsDict, + app: FastAPI, push_services, - docker_swarm, - user_id, - project_id, + docker_swarm: None, + user_id: UserID, + project_id: ProjectID, docker_client: docker.client.DockerClient, -) -> Callable: +) -> AsyncIterator[Callable[[int, int], Awaitable[list[dict[str, Any]]]]]: started_services = [] - async def push_start_services(number_comp: int, number_dyn: int, dependant=False): + async def push_start_services( + number_comp: int, number_dyn: int, dependant=False + ) -> list[dict[str, Any]]: pushed_services = await push_services( number_comp, number_dyn, inter_dependent_services=dependant ) @@ -51,12 +71,12 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False service_uuid = str(uuid.uuid1()) service_basepath = "/my/base/path" with pytest.raises(exceptions.ServiceUUIDNotFoundError): - await producer.get_service_details(aiohttp_mock_app, service_uuid) + await producer.get_service_details(app, service_uuid) # start the service started_service = await producer.start_service( - aiohttp_mock_app, - user_id, - project_id, + app, + f"{user_id}", + f"{project_id}", service_key, service_version, service_uuid, @@ -84,9 +104,7 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False assert "service_message" in started_service # wait for service to be running - node_details = await producer.get_service_details( - aiohttp_mock_app, service_uuid - ) + node_details = await producer.get_service_details(app, service_uuid) max_time = 60 for attempt in Retrying( wait=wait_fixed(1), stop=stop_after_delay(max_time), reraise=True @@ -95,9 +113,7 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False print( f"--> waiting for {started_service['service_key']}:{started_service['service_version']} to run..." 
) - node_details = await producer.get_service_details( - aiohttp_mock_app, service_uuid - ) + node_details = await producer.get_service_details(app, service_uuid) print( f"<-- {started_service['service_key']}:{started_service['service_version']} state is {node_details['service_state']} using {config.DEFAULT_MAX_MEMORY}Bytes, {config.DEFAULT_MAX_NANO_CPUS}nanocpus" ) @@ -123,9 +139,9 @@ async def push_start_services(number_comp: int, number_dyn: int, dependant=False # NOTE: Fake services are not even web-services therefore we cannot # even emulate a legacy dy-service that does not implement a save-state feature # so here we must make save_state=False - await producer.stop_service(aiohttp_mock_app, service_uuid, save_state=False) + await producer.stop_service(app, node_uuid=service_uuid, save_state=False) with pytest.raises(exceptions.ServiceUUIDNotFoundError): - await producer.get_service_details(aiohttp_mock_app, service_uuid) + await producer.get_service_details(app, service_uuid) async def test_find_service_tag(): @@ -143,31 +159,41 @@ async def test_find_service_tag(): ] } with pytest.raises(exceptions.ServiceNotAvailableError): - await producer._find_service_tag(list_of_images, "some_wrong_key", None) + await producer._find_service_tag( # noqa: SLF001 + list_of_images, "some_wrong_key", None + ) with pytest.raises(exceptions.ServiceNotAvailableError): - await producer._find_service_tag( + await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, "some wrong key" ) # get the latest (e.g. 
2.11.0) - latest_version = await producer._find_service_tag( + latest_version = await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, None ) assert latest_version == "2.11.0" - latest_version = await producer._find_service_tag( + latest_version = await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, "latest" ) assert latest_version == "2.11.0" # get a specific version - await producer._find_service_tag(list_of_images, my_service_key, "1.2.3") + await producer._find_service_tag( # noqa: SLF001 + list_of_images, my_service_key, "1.2.3" + ) -async def test_start_stop_service(docker_network, run_services): +async def test_start_stop_service( + docker_network: docker.models.networks.Network, + run_services: Callable[..., Awaitable[list[dict[str, Any]]]], +): # standard test await run_services(number_comp=1, number_dyn=1) async def test_service_assigned_env_variables( - docker_network, run_services, user_id, project_id + docker_network: docker.models.networks.Network, + run_services: Callable[..., Awaitable[list[dict[str, Any]]]], + user_id: UserID, + project_id: ProjectID, ): started_services = await run_services(number_comp=1, number_dyn=1) client = docker.from_env() @@ -202,8 +228,8 @@ async def test_service_assigned_env_variables( assert "SIMCORE_HOST_NAME" in envs_dict assert envs_dict["SIMCORE_HOST_NAME"] == docker_service.name - assert config.MEM_RESOURCE_LIMIT_KEY in envs_dict - assert config.CPU_RESOURCE_LIMIT_KEY in envs_dict + assert MEM_RESOURCE_LIMIT_KEY in envs_dict + assert CPU_RESOURCE_LIMIT_KEY in envs_dict async def test_interactive_service_published_port(docker_network, run_services): @@ -233,13 +259,16 @@ async def test_interactive_service_published_port(docker_network, run_services): @pytest.fixture def docker_network( - docker_client: docker.client.DockerClient, docker_swarm: None -) -> docker.models.networks.Network: + app_settings: ApplicationSettings, + docker_client: 
docker.client.DockerClient, + docker_swarm: None, +) -> Iterator[docker.models.networks.Network]: network = docker_client.networks.create( "test_network_default", driver="overlay", scope="swarm" ) print(f"--> docker network '{network.name}' created") - config.SIMCORE_SERVICES_NETWORK_NAME = network.name + # TODO: should probably be done via monkeypatch actually... + app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = network.name yield network # cleanup @@ -249,10 +278,10 @@ def docker_network( for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(1)): with attempt: list_networks = docker_client.networks.list( - config.SIMCORE_SERVICES_NETWORK_NAME + app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME ) assert not list_networks - config.SIMCORE_SERVICES_NETWORK_NAME = None + app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = None print(f"<-- removed docker network '{network.name}'") @@ -305,6 +334,11 @@ class FakeDockerService: expected_tag: str +@pytest.fixture +def registry_settings(app_settings: ApplicationSettings) -> RegistrySettings: + return app_settings.DIRECTOR_REGISTRY + + @pytest.mark.parametrize( "fake_service", [ @@ -321,13 +355,14 @@ class FakeDockerService: ], ) async def test_get_service_key_version_from_docker_service( + registry_settings: RegistrySettings, fake_service: FakeDockerService, ): docker_service_partial_inspect = { "Spec": { "TaskTemplate": { "ContainerSpec": { - "Image": f"{config.REGISTRY_PATH}{fake_service.service_str}" + "Image": f"{registry_settings.resolved_registry_url}{fake_service.service_str}" } } } @@ -335,8 +370,8 @@ async def test_get_service_key_version_from_docker_service( ( service_key, service_tag, - ) = await producer._get_service_key_version_from_docker_service( - docker_service_partial_inspect + ) = await producer._get_service_key_version_from_docker_service( # noqa: SLF001 + docker_service_partial_inspect, registry_settings ) assert service_key == fake_service.expected_key assert service_tag == 
fake_service.expected_tag @@ -352,18 +387,19 @@ async def test_get_service_key_version_from_docker_service( ], ) async def test_get_service_key_version_from_docker_service_except_invalid_keys( + registry_settings: RegistrySettings, fake_service_str: str, ): docker_service_partial_inspect = { "Spec": { "TaskTemplate": { "ContainerSpec": { - "Image": f"{config.REGISTRY_PATH if fake_service_str.startswith('/') else ''}{fake_service_str}" + "Image": f"{registry_settings.resolved_registry_url if fake_service_str.startswith('/') else ''}{fake_service_str}" } } } } with pytest.raises(exceptions.DirectorException): - await producer._get_service_key_version_from_docker_service( - docker_service_partial_inspect + await producer._get_service_key_version_from_docker_service( # noqa: SLF001 + docker_service_partial_inspect, registry_settings ) diff --git a/services/director/tests/unit/test_registry_cache_task.py b/services/director/tests/unit/test_registry_cache_task.py new file mode 100644 index 00000000000..6d611e92a32 --- /dev/null +++ b/services/director/tests/unit/test_registry_cache_task.py @@ -0,0 +1,67 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + + +# TODO: replace with aiocache - done already +# from simcore_service_director import config, main, registry_cache_task, registry_proxy + + +# @pytest.fixture +# def client( +# loop, +# aiohttp_client, +# aiohttp_unused_port, +# configure_schemas_location, +# configure_registry_access, +# ): +# config.DIRECTOR_REGISTRY_CACHING = True +# config.DIRECTOR_REGISTRY_CACHING_TTL = 5 +# # config.DIRECTOR_REGISTRY_CACHING_TTL = 5 +# app = main.setup_app() +# server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} + +# registry_cache_task.setup(app) + +# return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) + + +# async def test_registry_caching_task(client, push_services): +# app = client.app +# assert app + +# # check the task is started +# assert 
registry_cache_task.TASK_NAME in app +# # check the registry cache is empty (no calls yet) +# assert registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY in app + +# # check we do not get any repository +# list_of_services = await registry_proxy.list_services( +# app, registry_proxy.ServiceType.ALL +# ) +# assert not list_of_services +# assert app[registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY] != {} +# # create services in the registry +# pushed_services = await push_services( +# number_of_computational_services=1, number_of_interactive_services=1 +# ) +# # the services shall be updated +# await sleep( +# config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 +# ) # NOTE: this can take some time. Sleep increased by 10%. +# list_of_services = await registry_proxy.list_services( +# app, registry_proxy.ServiceType.ALL +# ) +# assert len(list_of_services) == 2 +# # add more +# pushed_services = await push_services( +# number_of_computational_services=2, +# number_of_interactive_services=2, +# version="2.0.", +# ) +# await sleep( +# config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 +# ) # NOTE: this sometimes takes a bit more. Sleep increased a 10%. 
+# list_of_services = await registry_proxy.list_services( +# app, registry_proxy.ServiceType.ALL +# ) +# assert len(list_of_services) == len(pushed_services) diff --git a/services/director/tests/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py similarity index 81% rename from services/director/tests/test_registry_proxy.py rename to services/director/tests/unit/test_registry_proxy.py index f59cb2c428f..4c60dcde667 100644 --- a/services/director/tests/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -5,32 +5,33 @@ import time import pytest -from simcore_service_director import config, registry_proxy +from fastapi import FastAPI +from simcore_service_director import registry_proxy async def test_list_no_services_available( - aiohttp_mock_app, + app: FastAPI, docker_registry, configure_registry_access, configure_schemas_location, ): computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL + app, registry_proxy.ServiceType.COMPUTATIONAL ) assert not computational_services # it's empty interactive_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC + app, registry_proxy.ServiceType.DYNAMIC ) assert not interactive_services all_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL + app, registry_proxy.ServiceType.ALL ) assert not all_services async def test_list_services_with_bad_json_formatting( - aiohttp_mock_app, + app: FastAPI, docker_registry, configure_registry_access, configure_schemas_location, @@ -44,21 +45,21 @@ async def test_list_services_with_bad_json_formatting( ) assert len(created_services) == 5 computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL + app, registry_proxy.ServiceType.COMPUTATIONAL ) assert not computational_services # it's empty interactive_services = 
await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC + app, registry_proxy.ServiceType.DYNAMIC ) assert not interactive_services all_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL + app, registry_proxy.ServiceType.ALL ) assert not all_services async def test_list_computational_services( - aiohttp_mock_app, + app: FastAPI, docker_registry, push_services, configure_registry_access, @@ -69,13 +70,13 @@ async def test_list_computational_services( ) computational_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.COMPUTATIONAL + app, registry_proxy.ServiceType.COMPUTATIONAL ) assert len(computational_services) == 6 async def test_list_interactive_services( - aiohttp_mock_app, + app: FastAPI, docker_registry, push_services, configure_registry_access, @@ -85,13 +86,13 @@ async def test_list_interactive_services( number_of_computational_services=5, number_of_interactive_services=4 ) interactive_services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.DYNAMIC + app, registry_proxy.ServiceType.DYNAMIC ) assert len(interactive_services) == 4 async def test_list_of_image_tags( - aiohttp_mock_app, + app: FastAPI, docker_registry, push_services, configure_registry_access, @@ -109,12 +110,12 @@ async def test_list_of_image_tags( image_number[key] = image_number[key] + 1 for key, number in image_number.items(): - list_of_image_tags = await registry_proxy.list_image_tags(aiohttp_mock_app, key) + list_of_image_tags = await registry_proxy.list_image_tags(app, key) assert len(list_of_image_tags) == number async def test_list_interactive_service_dependencies( - aiohttp_mock_app, + app: FastAPI, docker_registry, push_services, configure_registry_access, @@ -134,7 +135,7 @@ async def test_list_interactive_service_dependencies( ) image_dependencies = ( await registry_proxy.list_interactive_service_dependencies( - 
aiohttp_mock_app, + app, service_description["key"], service_description["version"], ) @@ -146,7 +147,7 @@ async def test_list_interactive_service_dependencies( async def test_get_image_labels( - aiohttp_mock_app, + app: FastAPI, docker_registry, push_services, configure_registry_access, @@ -159,7 +160,7 @@ async def test_get_image_labels( for image in images: service_description = image["service_description"] labels, image_manifest_digest = await registry_proxy.get_image_labels( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert "io.simcore.key" in labels assert "io.simcore.version" in labels @@ -175,7 +176,7 @@ async def test_get_image_labels( assert "simcore.service.settings" in labels assert image_manifest_digest == await registry_proxy.get_image_digest( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert image_manifest_digest is not None assert image_manifest_digest not in images_digests @@ -221,7 +222,7 @@ def test_get_service_last_namess(): async def test_get_image_details( - aiohttp_mock_app, + app: FastAPI, push_services, configure_registry_access, configure_schemas_location, @@ -232,7 +233,7 @@ async def test_get_image_details( for image in images: service_description = image["service_description"] details = await registry_proxy.get_image_details( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], service_description["version"] ) assert details.pop("image_digest").startswith("sha") @@ -241,7 +242,7 @@ async def test_get_image_details( async def test_registry_caching( - aiohttp_mock_app, + app: FastAPI, push_services, configure_registry_access, configure_schemas_location, @@ -249,30 +250,23 @@ async def test_registry_caching( images = await push_services( number_of_computational_services=1, 
number_of_interactive_services=1 ) - config.DIRECTOR_REGISTRY_CACHING = True + # TODO: use monkeypatching + # config.DIRECTOR_REGISTRY_CACHING = True start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) time_to_retrieve_without_cache = time.perf_counter() - start_time assert len(services) == len(images) start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) time_to_retrieve_with_cache = time.perf_counter() - start_time assert len(services) == len(images) assert time_to_retrieve_with_cache < time_to_retrieve_without_cache @pytest.mark.skip(reason="test needs credentials to real registry") -async def test_get_services_performance( - aiohttp_mock_app, loop, configure_custom_registry -): +async def test_get_services_performance(app, loop, configure_custom_registry): start_time = time.perf_counter() - services = await registry_proxy.list_services( - aiohttp_mock_app, registry_proxy.ServiceType.ALL - ) + services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) stop_time = time.perf_counter() print( f"\nTime to run getting services: {stop_time - start_time}s, #services {len(services)}, time per call {(stop_time - start_time) / len(services)}s/service" @@ -280,7 +274,7 @@ async def test_get_services_performance( async def test_generate_service_extras( - aiohttp_mock_app, + app, push_services, configure_registry_access, configure_schemas_location, @@ -294,7 +288,7 @@ async def test_generate_service_extras( service_extras = image["service_extras"] extras = await registry_proxy.get_service_extras( - aiohttp_mock_app, service_description["key"], service_description["version"] + app, service_description["key"], 
service_description["version"] ) assert extras == service_extras diff --git a/services/director/tests/test_utils.py b/services/director/tests/unit/test_utils.py similarity index 100% rename from services/director/tests/test_utils.py rename to services/director/tests/unit/test_utils.py From 86c306282c04559a7c7c3c89c1c935828bf96426 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 5 Nov 2024 23:23:18 +0100 Subject: [PATCH 135/201] test getting greener --- .../tests/unit/api/test_rest_health.py | 6 ++- .../test_rest_running_interactive_services.py | 43 ++++++++++++------- services/director/tests/unit/conftest.py | 42 ++++++------------ 3 files changed, 46 insertions(+), 45 deletions(-) diff --git a/services/director/tests/unit/api/test_rest_health.py b/services/director/tests/unit/api/test_rest_health.py index b1e6db622a4..7a429c668ff 100644 --- a/services/director/tests/unit/api/test_rest_health.py +++ b/services/director/tests/unit/api/test_rest_health.py @@ -7,7 +7,11 @@ from fastapi import status -async def test_healthcheck(client: httpx.AsyncClient, api_version_prefix: str): +async def test_healthcheck( + configure_registry_access, + client: httpx.AsyncClient, + api_version_prefix: str, +): resp = await client.get(f"/{api_version_prefix}/") assert resp.is_success diff --git a/services/director/tests/unit/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py index 87cfd34ee61..f7ad8ea07b8 100644 --- a/services/director/tests/unit/api/test_rest_running_interactive_services.py +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -7,9 +7,12 @@ import httpx import pytest +from faker import Faker # from aioresponses.core import CallbackResult, aioresponses from fastapi import status +from models_library.projects import ProjectID +from models_library.users import UserID def _assert_response_and_unwrap_envelope(got: 
httpx.Response): @@ -50,26 +53,28 @@ async def test_running_services_post_and_delete_no_swarm( ) async def test_running_services_post_and_delete( configure_swarm_stack_name, + configure_registry_access, client: httpx.AsyncClient, push_services, - docker_swarm, - user_id, - project_id, - api_version_prefix, + docker_swarm: None, + user_id: UserID, + project_id: ProjectID, + api_version_prefix: str, save_state: bool | None, expected_save_state_call: bool, mocker, + faker: Faker, ): params = {} resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", + "user_id": f"{faker.pyint(min_value=1)}", + "project_id": f"{faker.uuid4()}", + "service_uuid": f"{faker.uuid4()}", "service_key": "None", "service_tag": "None", # optional "service_basepath": "None", # optional @@ -78,31 +83,39 @@ async def test_running_services_post_and_delete( f"/{api_version_prefix}/running_interactive_services", params=params ) data = resp.json() - assert resp.status_code == status.HTTP_400_BAD_REQUEST, data + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, data + + fake_headers = {"x-simcore-user-agent": faker.pystr()} params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" params["service_tag"] = "1.2.3" resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params + f"/{api_version_prefix}/running_interactive_services", + params=params, + headers=fake_headers, ) data = resp.json() assert resp.status_code == status.HTTP_404_NOT_FOUND, data - created_services = await push_services(0, 2) + created_services = await push_services( + number_of_computational_services=0, number_of_interactive_services=2 + ) assert len(created_services) == 2 for created_service in created_services: 
service_description = created_service["service_description"] - params["user_id"] = user_id - params["project_id"] = project_id + params["user_id"] = f"{user_id}" + params["project_id"] = f"{project_id}" params["service_key"] = service_description["key"] params["service_tag"] = service_description["version"] service_port = created_service["internal_port"] service_entry_point = created_service["entry_point"] params["service_basepath"] = "/i/am/a/basepath" - params["service_uuid"] = str(uuid.uuid4()) + params["service_uuid"] = f"{faker.uuid4()}" # start the service resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params + f"/{api_version_prefix}/running_interactive_services", + params=params, + headers=fake_headers, ) assert resp.status_code == status.HTTP_201_CREATED assert resp.encoding == "application/json" diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index e17db023533..d9a1ec8f3ce 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -57,16 +57,20 @@ def common_schemas_specs_dir(osparc_simcore_root_dir: Path) -> Path: return specs_dir -@pytest.fixture(scope="session") -def configure_swarm_stack_name(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: - return setenvs_from_dict(monkeypatch, envs={"SWARM_STACK_NAME": "test_stack"}) +@pytest.fixture +def configure_swarm_stack_name( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, envs={"SWARM_STACK_NAME": "test_stack"} + ) @pytest.fixture def configure_registry_access( - monkeypatch: pytest.MonkeyPatch, docker_registry: str + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, docker_registry: str ) -> EnvVarsDict: - return setenvs_from_dict( + return app_environment | setenvs_from_dict( monkeypatch, envs={ "REGISTRY_URL": docker_registry, @@ -79,7 +83,9 @@ def 
configure_registry_access( @pytest.fixture(scope="session") def configure_custom_registry( - monkeypatch: pytest.MonkeyPatch, pytestconfig: pytest.Config + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + pytestconfig: pytest.Config, ) -> EnvVarsDict: # to set these values call # pytest --registry_url myregistry --registry_user username --registry_pw password @@ -92,7 +98,7 @@ def configure_custom_registry( registry_pw = pytestconfig.getoption("registry_pw") assert registry_pw assert isinstance(registry_pw, str) - return setenvs_from_dict( + return app_environment | setenvs_from_dict( monkeypatch, envs={ "REGISTRY_URL": registry_url, @@ -144,25 +150,3 @@ async def app( shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, ): yield the_test_app - - -# @pytest.fixture -# async def aiohttp_mock_app(loop, mocker): -# print("client session started ...") -# session = ClientSession() - -# mock_app_storage = { -# config.APP_CLIENT_SESSION_KEY: session, -# config.APP_REGISTRY_CACHE_DATA_KEY: {}, -# } - -# def _get_item(self, key): -# return mock_app_storage[key] - -# aiohttp_app = mocker.patch("aiohttp.web.Application") -# aiohttp_app.__getitem__ = _get_item - -# yield aiohttp_app - -# # cleanup session -# await session.close() From 724117cf39b9219b1f9bf2b83dbd6c5c0a48b67e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:05:46 +0100 Subject: [PATCH 136/201] fixed unit testing script --- ci/github/unit-testing/director.bash | 44 ++++++++++------------------ 1 file changed, 16 insertions(+), 28 deletions(-) diff --git a/ci/github/unit-testing/director.bash b/ci/github/unit-testing/director.bash index 6098c8fb29b..787f29116a3 100755 --- a/ci/github/unit-testing/director.bash +++ b/ci/github/unit-testing/director.bash @@ -6,44 +6,32 @@ set -o pipefail # don't hide errors within pipes IFS=$'\n\t' install() { - # Replaces 'bash ci/helpers/ensure_python_pip.bash' - - echo "INFO:" 
"$(python --version)" "@" "$(command -v python)" - - # installs pip if not in place - python -m ensurepip - - echo "INFO:" "$(pip --version)" "@" "$(command -v pip)" - # NOTE: pip<22.0 for python 3.6 - pip3 install --upgrade \ - pip~=21.0 \ - wheel \ - setuptools - python3 -m venv .venv + make devenv # shellcheck source=/dev/null source .venv/bin/activate pushd services/director - pip3 install -r requirements/ci.txt + make install-ci popd + uv pip list } test() { # shellcheck source=/dev/null source .venv/bin/activate + # tests without DB can be safely run in parallel + pushd services/director + make test-ci-unit pytest-parameters="--numprocesses=auto --ignore-glob=**/with_dbs/**" + # these tests cannot be run in parallel + make test-ci-unit test-path=with_dbs + popd +} + +typecheck() { + # shellcheck source=/dev/null + source .venv/bin/activate + uv pip install mypy pushd services/director - pytest \ - --color=yes \ - --cov-append \ - --cov-config=.coveragerc \ - --cov-report=term-missing \ - --cov-report=xml \ - --cov=simcore_service_director \ - --durations=10 \ - --keep-docker-up \ - --log-date-format="%Y-%m-%d %H:%M:%S" \ - --log-format="%(asctime)s %(levelname)s %(message)s" \ - --verbose \ - tests/ + make mypy popd } From 04c494a65790bd9a314e92e7d1eac254c83d6eea Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:08:20 +0100 Subject: [PATCH 137/201] fixed CI for director-v0 --- .github/workflows/ci-testing-deploy.yml | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 34f17a07d85..c32cd3a4ae7 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -189,6 +189,8 @@ jobs: - 'packages/**' - 'services/director/**' - 'services/docker-compose*' + - 'scripts/mypy/*' + - 'mypy.ini' director-v2: - 'packages/**' - 
'services/director-v2/**' @@ -912,11 +914,8 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - # KEEP 3.6 Development of this service is frozen - # KEEP ubuntu 20.04, else no python 3.6 - python: [3.6] - os: [ubuntu-20.04] - docker_buildx: [v0.10.4] + python: ["3.11"] + os: [ubuntu-22.04] fail-fast: false steps: - uses: actions/checkout@v4 @@ -929,12 +928,27 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} + - name: install uv + uses: astral-sh/setup-uv@v3 + with: + version: "0.4.x" + enable-cache: false + cache-dependency-glob: "**/director/requirements/ci.txt" - name: show system version run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/director.bash install + - name: typecheck + run: ./ci/github/unit-testing/director.bash typecheck - name: test + if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director.bash test + - name: upload failed tests logs + if: ${{ !cancelled() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ github.job }}_docker_logs + path: ./services/director/test_failures - uses: codecov/codecov-action@v4.6.0 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} From 0a2e089c31b2ea5ab184dd228a2c99f7763a0339 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:12:19 +0100 Subject: [PATCH 138/201] fixed bash script --- ci/github/unit-testing/director.bash | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/ci/github/unit-testing/director.bash b/ci/github/unit-testing/director.bash index 787f29116a3..a29764642ee 100755 --- a/ci/github/unit-testing/director.bash +++ b/ci/github/unit-testing/director.bash @@ -18,11 +18,8 @@ install() { test() { # shellcheck source=/dev/null source .venv/bin/activate - # tests without DB can be safely run in parallel pushd services/director - make test-ci-unit pytest-parameters="--numprocesses=auto --ignore-glob=**/with_dbs/**" - # these tests cannot be run in 
parallel - make test-ci-unit test-path=with_dbs + make test-ci-unit popd } From 941cd94eac35de2bb71ca6f2a584b1ecc2227d1b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:17:21 +0100 Subject: [PATCH 139/201] removed fixture --- services/director/tests/unit/test_docker_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/services/director/tests/unit/test_docker_utils.py b/services/director/tests/unit/test_docker_utils.py index 3c9180f88cb..c4588620ffc 100644 --- a/services/director/tests/unit/test_docker_utils.py +++ b/services/director/tests/unit/test_docker_utils.py @@ -61,7 +61,6 @@ async def test_swarm_has_worker_nodes(docker_swarm: None): async def test_push_services( push_services: Callable, configure_registry_access: None, - configure_schemas_location: None, ): await push_services( number_of_computational_services=3, number_of_interactive_services=3 From 56a0f03733f1182372e01c247910894e2aa5b2f7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:19:08 +0100 Subject: [PATCH 140/201] upgraded requirements --- services/director/requirements/_base.txt | 39 ++++++++++++----------- services/director/requirements/_test.txt | 1 + services/director/requirements/_tools.txt | 5 ++- 3 files changed, 23 insertions(+), 22 deletions(-) diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index df2ae5c98d9..70d87951345 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -126,7 +126,7 @@ idna==3.10 # httpx # requests # yarl -importlib-metadata==8.4.0 +importlib-metadata==8.5.0 # via opentelemetry-api itsdangerous==2.2.0 # via fastapi @@ -155,7 +155,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.27.0 +opentelemetry-api==1.28.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # 
opentelemetry-exporter-otlp-proto-grpc @@ -167,48 +167,49 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.27.0 +opentelemetry-exporter-otlp==1.28.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.27.0 +opentelemetry-exporter-otlp-proto-common==1.28.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.27.0 +opentelemetry-exporter-otlp-proto-grpc==1.28.0 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.27.0 +opentelemetry-exporter-otlp-proto-http==1.28.0 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.48b0 +opentelemetry-instrumentation==0.49b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.48b0 +opentelemetry-instrumentation-asgi==0.49b0 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.48b0 +opentelemetry-instrumentation-fastapi==0.49b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-redis==0.48b0 +opentelemetry-instrumentation-redis==0.49b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.48b0 +opentelemetry-instrumentation-requests==0.49b0 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.27.0 +opentelemetry-proto==1.28.0 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.27.0 +opentelemetry-sdk==1.28.0 # via # -r requirements/../../../packages/service-library/requirements/_base.in # 
opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.48b0 +opentelemetry-semantic-conventions==0.49b0 # via + # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.48b0 +opentelemetry-util-http==0.49b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -224,6 +225,8 @@ orjson==3.10.11 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # fastapi +packaging==24.1 + # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq prometheus-client==0.21.0 @@ -235,7 +238,7 @@ prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in propcache==0.2.0 # via yarl -protobuf==4.25.5 +protobuf==5.28.3 # via # googleapis-common-protos # opentelemetry-proto @@ -302,12 +305,10 @@ rich==13.9.4 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # typer -rpds-py==0.20.1 +rpds-py==0.21.0 # via # jsonschema # referencing -setuptools==75.3.0 - # via opentelemetry-instrumentation shellingham==1.5.4 # via typer six==1.16.0 diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 1ccd0e2a907..02cf32c761a 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -50,6 +50,7 @@ jsonref==1.1.0 # via -r requirements/_test.in packaging==24.1 # via + # -c requirements/_base.txt # pytest # pytest-sugar pluggy==1.5.0 diff --git a/services/director/requirements/_tools.txt 
b/services/director/requirements/_tools.txt index 3c83af3ad3e..4270bf693f1 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -37,6 +37,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via + # -c requirements/_base.txt # -c requirements/_test.txt # black # build @@ -68,9 +69,7 @@ pyyaml==6.0.2 ruff==0.7.2 # via -r requirements/../../../requirements/devenv.txt setuptools==75.3.0 - # via - # -c requirements/_base.txt - # pip-tools + # via pip-tools tomlkit==0.13.2 # via pylint typing-extensions==4.12.2 From 9c04de1c8e78f4c5253d267f31880b442802c85c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:21:42 +0100 Subject: [PATCH 141/201] fixed test_cli --- services/director/src/simcore_service_director/cli.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py index 4b6beb2a800..f2e16f6b97e 100644 --- a/services/director/src/simcore_service_director/cli.py +++ b/services/director/src/simcore_service_director/cli.py @@ -14,3 +14,13 @@ create_settings_command(settings_cls=ApplicationSettings, logger=_logger) ) main.callback()(create_version_callback(__version__)) + + +@main.command() +def run(): + """Runs application""" + typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") + typer.secho( + "$ uvicorn simcore_service_director.main:the_app", + fg=typer.colors.BLUE, + ) From e087eec1476f83063a942e478a53987474cbb39c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:40:44 +0100 Subject: [PATCH 142/201] fixing tests --- .../src/pytest_simcore/docker.py | 6 ++ .../src/pytest_simcore/docker_swarm.py | 42 ++++++++++++- services/autoscaling/tests/unit/conftest.py | 7 +-- .../src/simcore_service_director/producer.py | 44 ++++++++----- services/director/tests/unit/conftest.py | 1 + services/director/tests/unit/test_producer.py | 63 ++++++++++--------- .../tests/unit/test_registry_proxy.py | 10 --- .../osparc-gateway-server/tests/conftest.py | 48 ++------------ 8 files changed, 112 insertions(+), 109 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker.py b/packages/pytest-simcore/src/pytest_simcore/docker.py index 9b0a36cb1d8..9a5d5f26bdf 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker.py @@ -7,6 +7,12 @@ import pytest +@pytest.fixture +async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: + async with aiodocker.Docker() as docker_client: + yield docker_client + + @contextlib.asynccontextmanager async def _pause_container( async_docker_client: aiodocker.Docker, container_name: str diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index b0c3f1e62f0..223a0b647e5 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -10,13 +10,15 @@ from collections.abc import Iterator from contextlib import suppress from pathlib import Path -from typing import Any +from typing import Any, AsyncIterator, Awaitable, Callable +import aiodocker import docker import pytest import yaml from docker.errors import APIError 
-from tenacity import Retrying, TryAgain, retry +from faker import Faker +from tenacity import AsyncRetrying, Retrying, TryAgain, retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -390,3 +392,39 @@ async def _check_all_services_are_running(): raise _ResourceStillNotRemovedError(msg) _fetch_and_print_services(docker_client, "[AFTER REMOVED]") + + +@pytest.fixture +async def docker_network( + async_docker_client: aiodocker.Docker, faker: Faker +) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: + networks = [] + + async def _network_creator(**network_config_kwargs) -> dict[str, Any]: + network = await async_docker_client.networks.create( + config={"Name": faker.uuid4(), "Driver": "overlay"} | network_config_kwargs + ) + assert network + print(f"--> created network {network=}") + networks.append(network) + return await network.show() + + yield _network_creator + + # wait until all networks are really gone + async def _wait_for_network_deletion(network: aiodocker.docker.DockerNetwork): + network_name = (await network.show())["Name"] + await network.delete() + async for attempt in AsyncRetrying( + reraise=True, wait=wait_fixed(1), stop=stop_after_delay(60) + ): + with attempt: + print(f"<-- waiting for network '{network_name}' deletion...") + list_of_network_names = [ + n["Name"] for n in await async_docker_client.networks.list() + ] + assert network_name not in list_of_network_names + print(f"<-- network '{network_name}' deleted") + + print(f"<-- removing all networks {networks=}") + await asyncio.gather(*[_wait_for_network_deletion(network) for network in networks]) diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 9876a3a1c20..f71f5b69c8e 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -86,6 +86,7 @@ "pytest_simcore.aws_iam_service", 
"pytest_simcore.aws_ssm_service", "pytest_simcore.dask_scheduler", + "pytest_simcore.docker", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", @@ -433,12 +434,6 @@ async def autoscaling_docker() -> AsyncIterator[AutoscalingDocker]: yield cast(AutoscalingDocker, docker_client) -@pytest.fixture -async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: - async with aiodocker.Docker() as docker_client: - yield docker_client - - @pytest.fixture async def host_node( docker_swarm: None, diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index a0b73ddf2c9..15cd7491bb6 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -136,10 +136,12 @@ def _parse_env_settings(settings: list[str]) -> dict: async def _read_service_settings( app: FastAPI, key: str, tag: str, settings_name: str -) -> dict[str, Any] | list[Any]: +) -> dict[str, Any] | list[Any] | None: image_labels, _ = await registry_proxy.get_image_labels(app, key, tag) - settings: dict[str, Any] | list[Any] = ( - json.loads(image_labels[settings_name]) if settings_name in image_labels else {} + settings: dict[str, Any] | list[Any] | None = ( + json.loads(image_labels[settings_name]) + if settings_name in image_labels + else None ) log.debug("Retrieved %s settings: %s", settings_name, pformat(settings)) @@ -317,8 +319,11 @@ async def _create_docker_service_params( ] # some services define strip_path:true if they need the path to be stripped away - assert isinstance(reverse_proxy_settings, dict) # nosec - if reverse_proxy_settings and reverse_proxy_settings.get("strip_path"): + if ( + isinstance(reverse_proxy_settings, dict) + and reverse_proxy_settings + and reverse_proxy_settings.get("strip_path") + ): docker_params["labels"][ 
f"traefik.http.middlewares.{service_name}_stripprefixregex.stripprefixregex.regex" ] = f"^/x/{node_uuid}" @@ -876,17 +881,18 @@ async def _start_docker_service( service_boot_parameters_labels = await _read_service_settings( app, service_key, service_tag, SERVICE_RUNTIME_BOOTSETTINGS ) - assert isinstance(service_boot_parameters_labels, list) # nosec - service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) - if published_port: - session = get_client_session(app) - await _pass_port_to_service( - service_name, - published_port, - service_boot_parameters_labels, - session, - app_settings=app_settings, - ) + service_entrypoint = "" + if isinstance(service_boot_parameters_labels, list): + service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) + if published_port: + session = get_client_session(app) + await _pass_port_to_service( + service_name, + published_port, + service_boot_parameters_labels, + session, + app_settings=app_settings, + ) return { "published_port": published_port, @@ -1065,7 +1071,11 @@ async def _get_node_details( ) service_boot_parameters_labels = results[0] - service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) + service_entrypoint = "" + if service_boot_parameters_labels and isinstance( + service_boot_parameters_labels, list + ): + service_entrypoint = _get_service_entrypoint(service_boot_parameters_labels) service_basepath = results[1] service_state, service_msg = results[2] service_name = service["Spec"]["Name"] diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index d9a1ec8f3ce..02211a16bc1 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -18,6 +18,7 @@ pytest_plugins = [ "fixtures.fake_services", "pytest_simcore.cli_runner", + "pytest_simcore.docker", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", diff --git 
a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 38dd97631da..64b07f3e672 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -8,7 +8,7 @@ import uuid from collections.abc import Callable from dataclasses import dataclass -from typing import Any, AsyncIterator, Awaitable, Iterator +from typing import Any, AsyncIterator, Awaitable import docker import docker.models.networks @@ -35,8 +35,8 @@ def ensure_service_runs_in_ci(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, envs={ - "DEFAULT_MAX_MEMORY": int(25 * pow(1024, 2)), - "DEFAULT_MAX_NANO_CPUS": int(0.01 * pow(10, 9)), + "DEFAULT_MAX_MEMORY": f"{int(25 * pow(1024, 2))}", + "DEFAULT_MAX_NANO_CPUS": f"{int(0.01 * pow(10, 9))}", }, ) @@ -45,7 +45,6 @@ def ensure_service_runs_in_ci(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: async def run_services( ensure_service_runs_in_ci: EnvVarsDict, configure_registry_access: EnvVarsDict, - configure_schemas_location: EnvVarsDict, app: FastAPI, push_services, docker_swarm: None, @@ -59,7 +58,9 @@ async def push_start_services( number_comp: int, number_dyn: int, dependant=False ) -> list[dict[str, Any]]: pushed_services = await push_services( - number_comp, number_dyn, inter_dependent_services=dependant + number_of_computational_services=number_comp, + number_of_interactive_services=number_dyn, + inter_dependent_services=dependant, ) assert len(pushed_services) == (number_comp + number_dyn) for pushed_service in pushed_services: @@ -257,32 +258,32 @@ async def test_interactive_service_published_port(docker_network, run_services): assert docker_service.attrs["Spec"]["EndpointSpec"]["Mode"] == "dnsrr" -@pytest.fixture -def docker_network( - app_settings: ApplicationSettings, - docker_client: docker.client.DockerClient, - docker_swarm: None, -) -> Iterator[docker.models.networks.Network]: - network = 
docker_client.networks.create( - "test_network_default", driver="overlay", scope="swarm" - ) - print(f"--> docker network '{network.name}' created") - # TODO: should probably be done via monkeypatch actually... - app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = network.name - yield network - - # cleanup - print(f"<-- removing docker network '{network.name}'...") - network.remove() - - for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(1)): - with attempt: - list_networks = docker_client.networks.list( - app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME - ) - assert not list_networks - app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = None - print(f"<-- removed docker network '{network.name}'") +# @pytest.fixture +# def docker_network( +# app_settings: ApplicationSettings, +# docker_client: docker.client.DockerClient, +# docker_swarm: None, +# ) -> Iterator[docker.models.networks.Network]: +# network = docker_client.networks.create( +# "test_network_default", driver="overlay", scope="swarm" +# ) +# print(f"--> docker network '{network.name}' created") +# # TODO: should probably be done via monkeypatch actually... 
+# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = network.name +# yield network + +# # cleanup +# print(f"<-- removing docker network '{network.name}'...") +# network.remove() + +# for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(1)): +# with attempt: +# list_networks = docker_client.networks.list( +# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME +# ) +# assert not list_networks +# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = None +# print(f"<-- removed docker network '{network.name}'") async def test_interactive_service_in_correct_network( diff --git a/services/director/tests/unit/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py index 4c60dcde667..566d46a5b91 100644 --- a/services/director/tests/unit/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -13,7 +13,6 @@ async def test_list_no_services_available( app: FastAPI, docker_registry, configure_registry_access, - configure_schemas_location, ): computational_services = await registry_proxy.list_services( @@ -34,7 +33,6 @@ async def test_list_services_with_bad_json_formatting( app: FastAPI, docker_registry, configure_registry_access, - configure_schemas_location, push_services, ): # some services @@ -63,7 +61,6 @@ async def test_list_computational_services( docker_registry, push_services, configure_registry_access, - configure_schemas_location, ): await push_services( number_of_computational_services=6, number_of_interactive_services=3 @@ -80,7 +77,6 @@ async def test_list_interactive_services( docker_registry, push_services, configure_registry_access, - configure_schemas_location, ): await push_services( number_of_computational_services=5, number_of_interactive_services=4 @@ -96,7 +92,6 @@ async def test_list_of_image_tags( docker_registry, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=5, number_of_interactive_services=3 
@@ -119,7 +114,6 @@ async def test_list_interactive_service_dependencies( docker_registry, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=2, @@ -151,7 +145,6 @@ async def test_get_image_labels( docker_registry, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -225,7 +218,6 @@ async def test_get_image_details( app: FastAPI, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -245,7 +237,6 @@ async def test_registry_caching( app: FastAPI, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -277,7 +268,6 @@ async def test_generate_service_extras( app, push_services, configure_registry_access, - configure_schemas_location, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 diff --git a/services/osparc-gateway-server/tests/conftest.py b/services/osparc-gateway-server/tests/conftest.py index cb948ccb538..b7d545e4f0b 100644 --- a/services/osparc-gateway-server/tests/conftest.py +++ b/services/osparc-gateway-server/tests/conftest.py @@ -1,18 +1,16 @@ # pylint: disable=unused-argument # pylint: disable=redefined-outer-name -import asyncio +from collections.abc import AsyncIterator from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable import aiodocker import pytest -from faker import Faker -from tenacity.asyncio import AsyncRetrying -from tenacity.stop import stop_after_delay -from tenacity.wait import wait_fixed -pytest_plugins = ["pytest_simcore.repository_paths", "pytest_simcore.docker_swarm"] +pytest_plugins = [ + 
"pytest_simcore.repository_paths", + "pytest_simcore.docker_swarm", +] @pytest.fixture(scope="session") @@ -26,39 +24,3 @@ def package_dir(osparc_simcore_services_dir: Path): async def async_docker_client() -> AsyncIterator[aiodocker.Docker]: async with aiodocker.Docker() as docker_client: yield docker_client - - -@pytest.fixture -async def docker_network( - async_docker_client: aiodocker.Docker, faker: Faker -) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: - networks = [] - - async def _network_creator(**network_config_kwargs) -> dict[str, Any]: - network = await async_docker_client.networks.create( - config={"Name": faker.uuid4(), "Driver": "overlay"} | network_config_kwargs - ) - assert network - print(f"--> created network {network=}") - networks.append(network) - return await network.show() - - yield _network_creator - - # wait until all networks are really gone - async def _wait_for_network_deletion(network: aiodocker.docker.DockerNetwork): - network_name = (await network.show())["Name"] - await network.delete() - async for attempt in AsyncRetrying( - reraise=True, wait=wait_fixed(1), stop=stop_after_delay(60) - ): - with attempt: - print(f"<-- waiting for network '{network_name}' deletion...") - list_of_network_names = [ - n["Name"] for n in await async_docker_client.networks.list() - ] - assert network_name not in list_of_network_names - print(f"<-- network '{network_name}' deleted") - - print(f"<-- removing all networks {networks=}") - await asyncio.gather(*[_wait_for_network_deletion(network) for network in networks]) From 9be31ed764c714a9b7620760c0f2bdfef4a3bdfc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:51:20 +0100 Subject: [PATCH 143/201] ongoing --- services/director/tests/unit/test_producer.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/services/director/tests/unit/test_producer.py 
b/services/director/tests/unit/test_producer.py index 64b07f3e672..279be983257 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -286,8 +286,29 @@ async def test_interactive_service_published_port(docker_network, run_services): # print(f"<-- removed docker network '{network.name}'") +@pytest.fixture +async def with_docker_network( + docker_network: Callable[..., Awaitable[dict[str, Any]]], +) -> dict[str, Any]: + return await docker_network() + + +@pytest.fixture +def configured_docker_network( + with_docker_network: dict[str, Any], + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": with_docker_network["Name"]}, + ) + + async def test_interactive_service_in_correct_network( - docker_network: docker.models.networks.Network, run_services + with_docker_network: dict[str, Any], + configured_docker_network: EnvVarsDict, + run_services, ): running_dynamic_services = await run_services( number_comp=0, number_dyn=2, dependant=False @@ -303,7 +324,8 @@ async def test_interactive_service_in_correct_network( assert len(list_of_services) == 1 docker_service = list_of_services[0] assert ( - docker_service.attrs["Spec"]["Networks"][0]["Target"] == docker_network.id + docker_service.attrs["Spec"]["Networks"][0]["Target"] + == with_docker_network["Id"] ) From bb2ac26cc5cac264ba9d2a4d1535883e8219b2e8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 6 Nov 2024 17:58:43 +0100 Subject: [PATCH 144/201] remove wrong usage --- services/director/tests/unit/test_producer.py | 93 ++++++++----------- 1 file changed, 37 insertions(+), 56 deletions(-) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 279be983257..bd123c2ea24 100644 --- a/services/director/tests/unit/test_producer.py 
+++ b/services/director/tests/unit/test_producer.py @@ -6,9 +6,9 @@ import json import uuid -from collections.abc import Callable +from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass -from typing import Any, AsyncIterator, Awaitable +from typing import Any import docker import docker.models.networks @@ -31,8 +31,10 @@ @pytest.fixture -def ensure_service_runs_in_ci(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: - return setenvs_from_dict( +def ensure_service_runs_in_ci( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( monkeypatch, envs={ "DEFAULT_MAX_MEMORY": f"{int(25 * pow(1024, 2))}", @@ -41,11 +43,31 @@ def ensure_service_runs_in_ci(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: ) +@pytest.fixture +async def with_docker_network( + docker_network: Callable[..., Awaitable[dict[str, Any]]], +) -> dict[str, Any]: + return await docker_network() + + +@pytest.fixture +def configured_docker_network( + with_docker_network: dict[str, Any], + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": with_docker_network["Name"]}, + ) + + @pytest.fixture async def run_services( ensure_service_runs_in_ci: EnvVarsDict, configure_registry_access: EnvVarsDict, app: FastAPI, + app_settings: ApplicationSettings, push_services, docker_swarm: None, user_id: UserID, @@ -116,7 +138,7 @@ async def push_start_services( ) node_details = await producer.get_service_details(app, service_uuid) print( - f"<-- {started_service['service_key']}:{started_service['service_version']} state is {node_details['service_state']} using {config.DEFAULT_MAX_MEMORY}Bytes, {config.DEFAULT_MAX_NANO_CPUS}nanocpus" + f"<-- {started_service['service_key']}:{started_service['service_version']} state is {node_details['service_state']} using 
{app_settings.DIRECTOR_DEFAULT_MAX_MEMORY}Bytes, {app_settings.DIRECTOR_DEFAULT_MAX_NANO_CPUS}nanocpus" ) for service in docker_client.services.list(): tasks = service.tasks() @@ -183,7 +205,7 @@ async def test_find_service_tag(): async def test_start_stop_service( - docker_network: docker.models.networks.Network, + configured_docker_network: EnvVarsDict, run_services: Callable[..., Awaitable[list[dict[str, Any]]]], ): # standard test @@ -191,7 +213,7 @@ async def test_start_stop_service( async def test_service_assigned_env_variables( - docker_network: docker.models.networks.Network, + configured_docker_network: EnvVarsDict, run_services: Callable[..., Awaitable[list[dict[str, Any]]]], user_id: UserID, project_id: ProjectID, @@ -233,7 +255,10 @@ async def test_service_assigned_env_variables( assert CPU_RESOURCE_LIMIT_KEY in envs_dict -async def test_interactive_service_published_port(docker_network, run_services): +async def test_interactive_service_published_port( + configured_docker_network: EnvVarsDict, + run_services, +): running_dynamic_services = await run_services(number_comp=0, number_dyn=1) assert len(running_dynamic_services) == 1 @@ -258,53 +283,6 @@ async def test_interactive_service_published_port(docker_network, run_services): assert docker_service.attrs["Spec"]["EndpointSpec"]["Mode"] == "dnsrr" -# @pytest.fixture -# def docker_network( -# app_settings: ApplicationSettings, -# docker_client: docker.client.DockerClient, -# docker_swarm: None, -# ) -> Iterator[docker.models.networks.Network]: -# network = docker_client.networks.create( -# "test_network_default", driver="overlay", scope="swarm" -# ) -# print(f"--> docker network '{network.name}' created") -# # TODO: should probably be done via monkeypatch actually... 
-# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = network.name -# yield network - -# # cleanup -# print(f"<-- removing docker network '{network.name}'...") -# network.remove() - -# for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(1)): -# with attempt: -# list_networks = docker_client.networks.list( -# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME -# ) -# assert not list_networks -# app_settings.DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME = None -# print(f"<-- removed docker network '{network.name}'") - - -@pytest.fixture -async def with_docker_network( - docker_network: Callable[..., Awaitable[dict[str, Any]]], -) -> dict[str, Any]: - return await docker_network() - - -@pytest.fixture -def configured_docker_network( - with_docker_network: dict[str, Any], - app_environment: EnvVarsDict, - monkeypatch: pytest.MonkeyPatch, -) -> EnvVarsDict: - return app_environment | setenvs_from_dict( - monkeypatch, - {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": with_docker_network["Name"]}, - ) - - async def test_interactive_service_in_correct_network( with_docker_network: dict[str, Any], configured_docker_network: EnvVarsDict, @@ -329,7 +307,10 @@ async def test_interactive_service_in_correct_network( ) -async def test_dependent_services_have_common_network(docker_network, run_services): +async def test_dependent_services_have_common_network( + configured_docker_network: EnvVarsDict, + run_services, +): running_dynamic_services = await run_services( number_comp=0, number_dyn=2, dependant=True ) From cc1eb8531aa2d1e719371640bbed4bb0e03344c6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:45:20 +0100 Subject: [PATCH 145/201] added aioresponses --- services/director/requirements/_base.txt | 4 +-- services/director/requirements/_test.in | 1 + services/director/requirements/_test.txt | 35 ++++++++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) diff --git 
a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index 70d87951345..1382ab713d0 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -244,7 +244,7 @@ protobuf==5.28.3 # opentelemetry-proto psutil==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -pydantic==1.10.18 +pydantic==1.10.19 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -330,7 +330,7 @@ tenacity==9.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -tqdm==4.66.6 +tqdm==4.67.0 # via -r requirements/../../../packages/service-library/requirements/_base.in typer==0.12.5 # via diff --git a/services/director/requirements/_test.in b/services/director/requirements/_test.in index 004f8396e81..a6bd90a3acf 100644 --- a/services/director/requirements/_test.in +++ b/services/director/requirements/_test.in @@ -10,6 +10,7 @@ # testing asgi_lifespan +aioresponses docker faker jsonref diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 02cf32c761a..52e2969fff0 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -1,3 +1,18 @@ +aiohappyeyeballs==2.4.3 + # via + # -c requirements/_base.txt + # aiohttp +aiohttp==3.10.10 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # aioresponses +aioresponses==0.7.6 + # via -r requirements/_test.in +aiosignal==1.3.1 + # via + # -c requirements/_base.txt + # aiohttp anyio==4.6.2.post1 # via # -c requirements/_base.txt @@ -7,6 +22,7 @@ asgi-lifespan==2.1.0 attrs==24.2.0 # via # -c 
requirements/_base.txt + # aiohttp # pytest-docker certifi==2024.8.30 # via @@ -25,6 +41,11 @@ docker==7.1.0 # via -r requirements/_test.in faker==30.8.2 # via -r requirements/_test.in +frozenlist==1.5.0 + # via + # -c requirements/_base.txt + # aiohttp + # aiosignal h11==0.14.0 # via # -c requirements/_base.txt @@ -44,10 +65,16 @@ idna==3.10 # anyio # httpx # requests + # yarl iniconfig==2.0.0 # via pytest jsonref==1.1.0 # via -r requirements/_test.in +multidict==6.1.0 + # via + # -c requirements/_base.txt + # aiohttp + # yarl packaging==24.1 # via # -c requirements/_base.txt @@ -55,6 +82,10 @@ packaging==24.1 # pytest-sugar pluggy==1.5.0 # via pytest +propcache==0.2.0 + # via + # -c requirements/_base.txt + # yarl pytest==8.3.3 # via # -r requirements/_test.in @@ -112,3 +143,7 @@ urllib3==2.2.3 # -c requirements/_base.txt # docker # requests +yarl==1.17.1 + # via + # -c requirements/_base.txt + # aiohttp From f97dfa4095d22644c0b348da981c185543952cdc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:45:34 +0100 Subject: [PATCH 146/201] ensure docker_networks has a swarm --- packages/pytest-simcore/src/pytest_simcore/docker_swarm.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index 223a0b647e5..657f84f9667 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -396,7 +396,9 @@ async def _check_all_services_are_running(): @pytest.fixture async def docker_network( - async_docker_client: aiodocker.Docker, faker: Faker + docker_swarm: None, + async_docker_client: aiodocker.Docker, + faker: Faker, ) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: networks = [] From e3b3d42c9105a66a07b659791960a5be65b2e7e1 Mon Sep 17 00:00:00 2001 From: sanderegg 
<35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:45:40 +0100 Subject: [PATCH 147/201] ruff --- .../src/pytest_simcore/simcore_service_library_fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py b/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py index fc85ee7a690..90104625cf0 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_service_library_fixtures.py @@ -1,4 +1,4 @@ -from typing import AsyncIterable +from collections.abc import AsyncIterable import pytest from servicelib.async_utils import cancel_sequential_workers From cd22c3676c9f38eeb0904dbadc19defde95b4d5a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:45:51 +0100 Subject: [PATCH 148/201] note --- packages/service-library/src/servicelib/async_utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/service-library/src/servicelib/async_utils.py b/packages/service-library/src/servicelib/async_utils.py index 3385ad5820e..42ba951aafb 100644 --- a/packages/service-library/src/servicelib/async_utils.py +++ b/packages/service-library/src/servicelib/async_utils.py @@ -39,6 +39,7 @@ class QueueElement: output: Any | None = None +# NOTE: If you get issues with event loop already closed error use ensure_run_in_sequence_context_is_empty fixture in your tests _sequential_jobs_contexts: dict[str, Context] = {} From bff77dc7f8442917d1d55a6825e3ac141f593efa Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:46:11 +0100 Subject: [PATCH 149/201] fixed api --- .../api/rest/_running_interactive_services.py | 47 +++++-------------- 1 file changed, 13 insertions(+), 34 deletions(-) diff --git 
a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index b3800c639c7..ae3a3c63670 100644 --- a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -9,9 +9,8 @@ from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID from servicelib.fastapi.dependencies import get_app -from simcore_service_director import exceptions -from ... import producer +from ... import exceptions, producer router = APIRouter() @@ -21,25 +20,20 @@ @router.get("/running_interactive_services") async def list_running_services( the_app: Annotated[FastAPI, Depends(get_app)], - user_id: UserID | None, - project_id: ProjectID | None, + user_id: UserID | None = None, + project_id: ProjectID | None = None, ) -> Envelope[list[dict[str, Any]]]: log.debug( "Client does list_running_services request user_id %s, project_id %s", user_id, project_id, ) - try: - services = await producer.get_services_details( - the_app, - f"{user_id}" if user_id else None, - f"{project_id}" if project_id else None, - ) - return Envelope[list[dict[str, Any]]](data=services) - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err + services = await producer.get_services_details( + the_app, + f"{user_id}" if user_id else None, + f"{project_id}" if project_id else None, + ) + return Envelope[list[dict[str, Any]]](data=services) @router.post( @@ -52,7 +46,7 @@ async def start_service( project_id: ProjectID, service_key: ServiceKey, service_uuid: UUID, - service_basepath: Path, + service_basepath: Path = Path(), service_tag: ServiceVersion | None = None, x_simcore_user_agent: str = Header(...), ) -> Envelope[dict[str, Any]]: @@ -78,11 +72,6 @@ 
async def start_service( x_simcore_user_agent, ) return Envelope[dict[str, Any]](data=service) - except exceptions.ServiceStartTimeoutError as err: - - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err except exceptions.ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" @@ -95,10 +84,6 @@ async def start_service( raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err @router.get("/running_interactive_services/{service_uuid}") @@ -117,10 +102,6 @@ async def get_running_service( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err @router.delete( @@ -137,13 +118,11 @@ async def stop_service( service_uuid, ) try: - await producer.stop_service(the_app, f"{service_uuid}", save_state) + await producer.stop_service( + the_app, node_uuid=f"{service_uuid}", save_state=save_state + ) except exceptions.ServiceUUIDNotFoundError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err From edc260efbb63fa1e556f94b17d4365217c9a9d8b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:46:24 +0100 Subject: [PATCH 150/201] use middleware --- .../api/rest/_service_extras.py | 4 ---- .../simcore_service_director/api/rest/_services.py | 13 ------------- 2 files changed, 17 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py 
b/services/director/src/simcore_service_director/api/rest/_service_extras.py index cfcc19e221e..0aa30808dae 100644 --- a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -37,7 +37,3 @@ async def list_service_extras( raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 7e3bdb91d51..9e981177ba0 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -45,10 +45,6 @@ async def list_services( raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err @router.get("/services/{service_key}/{service_version}") @@ -77,10 +73,6 @@ async def get_service( raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err @router.get("/services/{service_key}/{service_version}/labels") @@ -109,8 +101,3 @@ async def list_service_labels( raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err - - except Exception as err: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"{err}" - ) from err From aa22b7debf81c16caf37ba06989a021dcf4cab8a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:46:46 +0100 Subject: 
[PATCH 151/201] ensure in sequential workers are properly closed --- .../director/src/simcore_service_director/core/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index c8aa7a8fc14..ae8467f902d 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -2,6 +2,7 @@ from typing import Final from fastapi import FastAPI +from servicelib.async_utils import cancel_sequential_workers from servicelib.fastapi.tracing import setup_tracing from .._meta import ( @@ -65,6 +66,7 @@ async def _on_startup() -> None: print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 async def _on_shutdown() -> None: + await cancel_sequential_workers() print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201 app.add_event_handler("startup", _on_startup) From 0e36744f39b2fbe0599b049e511165b417ca2668 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:46:58 +0100 Subject: [PATCH 152/201] proper exception --- .../src/simcore_service_director/producer.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 15cd7491bb6..d246a919fb1 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -453,15 +453,11 @@ async def _create_docker_service_params( ] # attach the service to the swarm network dedicated to services - try: - swarm_network = await _get_swarm_network(client, app_settings=app_settings) - swarm_network_id = swarm_network["Id"] - swarm_network_name = swarm_network["Name"] - docker_params["networks"].append(swarm_network_id) - 
docker_params["labels"]["traefik.docker.network"] = swarm_network_name - - except exceptions.DirectorException: - log.exception("Could not find swarm network") + swarm_network = await _get_swarm_network(client, app_settings=app_settings) + swarm_network_id = swarm_network["Id"] + swarm_network_name = swarm_network["Name"] + docker_params["networks"].append(swarm_network_id) + docker_params["labels"]["traefik.docker.network"] = swarm_network_name # set labels for CPU and Memory limits nano_cpus_limit = str( @@ -524,7 +520,7 @@ async def _get_swarm_network( msg=( "Swarm network name is not configured, found following networks " "(if there is more then 1 network, remove the one which has no " - f"containers attached and all is fixed): {networks}" + f"containers attached and all is fixed): {networks if networks else 'no swarm network!'}" ) ) return networks[0] From d3be678357d33f435f207ddbd163a80d605b941f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:47:12 +0100 Subject: [PATCH 153/201] remove polution of logs --- services/director/src/simcore_service_director/registry_proxy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 1cdf0362315..cb661109d20 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -89,7 +89,6 @@ async def _basic_auth_registry_request( ) elif response.status == HTTPStatus.NOT_FOUND: - logger.exception("Path to registry not found: %s", url) raise exceptions.ServiceNotAvailableError(str(path)) elif response.status > 399: From 95f07a12c3488f7b32d5dc8a1566222550e6005c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:47:27 +0100 Subject: [PATCH 154/201] test passes --- 
.../test_rest_running_interactive_services.py | 158 +++++++++++------- 1 file changed, 102 insertions(+), 56 deletions(-) diff --git a/services/director/tests/unit/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py index f7ad8ea07b8..8b18ee8ca44 100644 --- a/services/director/tests/unit/api/test_rest_running_interactive_services.py +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -4,19 +4,22 @@ # pylint: disable=too-many-arguments import uuid +from collections.abc import Iterator import httpx import pytest +from aioresponses import CallbackResult, aioresponses from faker import Faker - -# from aioresponses.core import CallbackResult, aioresponses from fastapi import status from models_library.projects import ProjectID from models_library.users import UserID +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.async_utils import _sequential_jobs_contexts def _assert_response_and_unwrap_envelope(got: httpx.Response): - assert got.encoding == "application/json" + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" body = got.json() assert isinstance(body, dict) @@ -28,12 +31,12 @@ def _assert_response_and_unwrap_envelope(got: httpx.Response): reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" ) async def test_running_services_post_and_delete_no_swarm( - configure_swarm_stack_name, + configure_swarm_stack_name: EnvVarsDict, client: httpx.AsyncClient, push_services, - user_id, - project_id, - api_version_prefix, + user_id: UserID, + project_id: ProjectID, + api_version_prefix: str, ): params = { "user_id": "None", @@ -48,15 +51,26 @@ async def test_running_services_post_and_delete_no_swarm( assert resp.status_code == 500, data +@pytest.fixture +def x_simcore_user_agent_header(faker: Faker) -> dict[str, str]: + return 
{"x-simcore-user-agent": faker.pystr()} + + +@pytest.fixture +def sequential_context_cleaner() -> Iterator[None]: + yield + _sequential_jobs_contexts.clear() + + @pytest.mark.parametrize( "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] ) async def test_running_services_post_and_delete( - configure_swarm_stack_name, - configure_registry_access, + configure_swarm_stack_name: EnvVarsDict, + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, client: httpx.AsyncClient, push_services, - docker_swarm: None, user_id: UserID, project_id: ProjectID, api_version_prefix: str, @@ -64,6 +78,8 @@ async def test_running_services_post_and_delete( expected_save_state_call: bool, mocker, faker: Faker, + x_simcore_user_agent_header: dict[str, str], + ensure_run_in_sequence_context_is_empty: None, ): params = {} resp = await client.post( @@ -85,14 +101,12 @@ async def test_running_services_post_and_delete( data = resp.json() assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, data - fake_headers = {"x-simcore-user-agent": faker.pystr()} - params["service_key"] = "simcore/services/comp/somfunkyname-nhsd" params["service_tag"] = "1.2.3" resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params, - headers=fake_headers, + headers=x_simcore_user_agent_header, ) data = resp.json() assert resp.status_code == status.HTTP_404_NOT_FOUND, data @@ -115,10 +129,11 @@ async def test_running_services_post_and_delete( resp = await client.post( f"/{api_version_prefix}/running_interactive_services", params=params, - headers=fake_headers, + headers=x_simcore_user_agent_header, ) - assert resp.status_code == status.HTTP_201_CREATED - assert resp.encoding == "application/json" + assert resp.status_code == status.HTTP_201_CREATED, resp.text + assert resp.encoding == "utf-8" + assert resp.headers["content-type"] == "application/json" running_service_enveloped = resp.json() assert 
isinstance(running_service_enveloped["data"], dict) assert all( @@ -158,7 +173,8 @@ async def test_running_services_post_and_delete( ) assert resp.status_code == status.HTTP_200_OK text = resp.text - assert resp.encoding == "application/json", f"Got {text=}" + assert resp.headers["content-type"] == "application/json" + assert resp.encoding == "utf-8", f"Got {text=}" running_service_enveloped = resp.json() assert isinstance(running_service_enveloped["data"], dict) assert all( @@ -193,55 +209,64 @@ async def test_running_services_post_and_delete( if save_state: query_params.update({"save_state": "true" if save_state else "false"}) - # TODO: replace with respx?? - # mocked_save_state_cb = mocker.MagicMock( - # return_value=CallbackResult(status=200, payload={}) - # ) - # PASSTHROUGH_REQUESTS_PREFIXES = [ - # "http://127.0.0.1", - # "http://localhost", - # "unix://", # docker engine - # "ws://", # websockets - # ] - # with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + mocked_save_state_cb = mocker.MagicMock( + return_value=CallbackResult(status=200, payload={}) + ) + PASSTHROUGH_REQUESTS_PREFIXES = [ + "http://127.0.0.1", + "http://localhost", + "unix://", # docker engine + "ws://", # websockets + ] + with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - # # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- - # mock.post( - # f"http://{service_host}:{service_port}{service_basepath}/state", - # status=200, - # callback=mocked_save_state_cb, - # ) - # resp = await client.delete( - # f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", - # params=query_params, - # ) - # if expected_save_state_call: - # mocked_save_state_cb.assert_called_once() + # POST /http://service_host:service_port service_basepath/state ------------------------------------------------- + mock.post( + f"http://{service_host}:{service_port}{service_basepath}/state", + 
status=200, + callback=mocked_save_state_cb, + ) + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{params['service_uuid']}", + params=query_params, + ) + if expected_save_state_call: + mocked_save_state_cb.assert_called_once() - # text = resp.text - # assert resp.status_code == status.HTTP_204_NO_CONTENT, text - # assert resp.encoding == "application/json" - # data = resp.json() - # assert data is None + text = resp.text + assert resp.status_code == status.HTTP_204_NO_CONTENT, text + assert resp.headers["content-type"] == "application/json" + assert resp.encoding == "utf-8" async def test_running_interactive_services_list_get( - client: httpx.AsyncClient, push_services, docker_swarm + configure_swarm_stack_name: EnvVarsDict, + configure_registry_access: EnvVarsDict, + configured_docker_network: EnvVarsDict, + client: httpx.AsyncClient, + push_services, + x_simcore_user_agent_header: dict[str, str], + api_version_prefix: str, + ensure_run_in_sequence_context_is_empty: None, + faker: Faker, ): """Test case for running_interactive_services_list_get Returns a list of interactive services """ - user_ids = ["first_user_id", "second_user_id"] - project_ids = ["first_project_id", "second_project_id", "third_project_id"] + user_ids = [faker.pyint(min_value=1), faker.pyint(min_value=1)] + project_ids = [faker.uuid4(), faker.uuid4(), faker.uuid4()] # prepare services NUM_SERVICES = 1 - created_services = await push_services(0, NUM_SERVICES) - assert len(created_services) == NUM_SERVICES + available_services = await push_services( + number_of_computational_services=0, number_of_interactive_services=NUM_SERVICES + ) + assert len(available_services) == NUM_SERVICES # start the services + created_services = [] for user_id in user_ids: for project_id in project_ids: - for created_service in created_services: + for created_service in available_services: service_description = created_service["service_description"] params = {} params["user_id"] = 
user_id @@ -251,9 +276,12 @@ async def test_running_interactive_services_list_get( params["service_uuid"] = str(uuid.uuid4()) # start the service resp = await client.post( - "/v0/running_interactive_services", params=params + "/v0/running_interactive_services", + params=params, + headers=x_simcore_user_agent_header, ) - assert resp.status_code == 201 + assert resp.status_code == 201, resp.text + created_services.append(resp.json()["data"]) # get the list of services for user_id in user_ids: for project_id in project_ids: @@ -266,7 +294,7 @@ async def test_running_interactive_services_list_get( assert ( response.status_code == status.HTTP_200_OK ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) + data, error = _assert_response_and_unwrap_envelope(response) assert data assert not error services_list = data @@ -279,7 +307,7 @@ async def test_running_interactive_services_list_get( assert ( response.status_code == status.HTTP_200_OK ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) + data, error = _assert_response_and_unwrap_envelope(response) assert data assert not error services_list = data @@ -293,8 +321,26 @@ async def test_running_interactive_services_list_get( assert ( response.status_code == status.HTTP_200_OK ), f"Response body is : {response.text}" - data, error = _assert_response_and_unwrap_envelope(response.json()) + data, error = _assert_response_and_unwrap_envelope(response) assert data assert not error services_list = data assert len(services_list) == len(user_ids) * NUM_SERVICES + # get all the running services + response = await client.get("/v0/running_interactive_services") + assert ( + response.status_code == status.HTTP_200_OK + ), f"Response body is : {response.text}" + data, error = _assert_response_and_unwrap_envelope(response) + assert data + assert not error + services_list = data + assert len(services_list) == len(user_ids) * 
len(project_ids) * NUM_SERVICES + + # cleanup + for service in created_services: + resp = await client.delete( + f"/{api_version_prefix}/running_interactive_services/{service['service_uuid']}", + params={"save_state": False}, + ) + assert resp.status_code == status.HTTP_204_NO_CONTENT, resp.text From 78362745b1a209e06d0fec957133f15d4db3d774 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:47:40 +0100 Subject: [PATCH 155/201] more fixtures --- services/director/tests/unit/conftest.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index 02211a16bc1..9bdafb00527 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -3,8 +3,9 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Awaitable, Callable from pathlib import Path +from typing import Any import pytest import simcore_service_director @@ -26,6 +27,7 @@ "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", "pytest_simcore.repository_paths", + "pytest_simcore.simcore_service_library_fixtures", ] @@ -151,3 +153,22 @@ async def app( shutdown_timeout=None if is_pdb_enabled else MAX_TIME_FOR_APP_TO_SHUTDOWN, ): yield the_test_app + + +@pytest.fixture +async def with_docker_network( + docker_network: Callable[..., Awaitable[dict[str, Any]]], +) -> dict[str, Any]: + return await docker_network() + + +@pytest.fixture +def configured_docker_network( + with_docker_network: dict[str, Any], + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": with_docker_network["Name"]}, + ) From b36a22878be6ccb7d5aabcd13e9674bb1430c1ce Mon Sep 
17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:48:36 +0100 Subject: [PATCH 156/201] cleanup --- services/director/tests/unit/api/conftest.py | 8 +++++++- .../api/test_rest_running_interactive_services.py | 13 ------------- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/services/director/tests/unit/api/conftest.py b/services/director/tests/unit/api/conftest.py index c1d010bb3a2..e295a9dacd1 100644 --- a/services/director/tests/unit/api/conftest.py +++ b/services/director/tests/unit/api/conftest.py @@ -7,6 +7,7 @@ import httpx import pytest +from faker import Faker from fastapi import FastAPI from fixtures.fake_services import PushServicesCallable, ServiceInRegistryInfoDict from httpx._transports.asgi import ASGITransport @@ -17,7 +18,7 @@ async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: # - Needed for app to trigger start/stop event handlers # - Prefer this client instead of fastapi.testclient.TestClient async with httpx.AsyncClient( - app=app, + transport=ASGITransport(app=app), base_url="http://director.testserver.io", headers={"Content-Type": "application/json"}, ) as client: @@ -32,3 +33,8 @@ async def created_services( return await push_services( number_of_computational_services=3, number_of_interactive_services=2 ) + + +@pytest.fixture +def x_simcore_user_agent_header(faker: Faker) -> dict[str, str]: + return {"x-simcore-user-agent": faker.pystr()} diff --git a/services/director/tests/unit/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py index 8b18ee8ca44..7f2820ee68d 100644 --- a/services/director/tests/unit/api/test_rest_running_interactive_services.py +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -4,7 +4,6 @@ # pylint: disable=too-many-arguments import uuid -from collections.abc import Iterator import httpx import pytest @@ -14,7 +13,6 @@ from 
models_library.projects import ProjectID from models_library.users import UserID from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.async_utils import _sequential_jobs_contexts def _assert_response_and_unwrap_envelope(got: httpx.Response): @@ -51,17 +49,6 @@ async def test_running_services_post_and_delete_no_swarm( assert resp.status_code == 500, data -@pytest.fixture -def x_simcore_user_agent_header(faker: Faker) -> dict[str, str]: - return {"x-simcore-user-agent": faker.pystr()} - - -@pytest.fixture -def sequential_context_cleaner() -> Iterator[None]: - yield - _sequential_jobs_contexts.clear() - - @pytest.mark.parametrize( "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] ) From dd642be4bc366da09c1eb666670b52b881322921 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:49:43 +0100 Subject: [PATCH 157/201] cleanup --- services/director/tests/unit/test_producer.py | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index bd123c2ea24..53f80d1f0ad 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -11,7 +11,6 @@ from typing import Any import docker -import docker.models.networks import pytest from fastapi import FastAPI from models_library.projects import ProjectID @@ -43,25 +42,6 @@ def ensure_service_runs_in_ci( ) -@pytest.fixture -async def with_docker_network( - docker_network: Callable[..., Awaitable[dict[str, Any]]], -) -> dict[str, Any]: - return await docker_network() - - -@pytest.fixture -def configured_docker_network( - with_docker_network: dict[str, Any], - app_environment: EnvVarsDict, - monkeypatch: pytest.MonkeyPatch, -) -> EnvVarsDict: - return app_environment | setenvs_from_dict( - monkeypatch, - {"DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME": 
with_docker_network["Name"]}, - ) - - @pytest.fixture async def run_services( ensure_service_runs_in_ci: EnvVarsDict, From 1b9a8df1abcdfcef7d3f228d25565206f6db7d55 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 15:32:28 +0100 Subject: [PATCH 158/201] missing registry configuration --- .../tests/unit/api/test_rest_service_extras.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/services/director/tests/unit/api/test_rest_service_extras.py b/services/director/tests/unit/api/test_rest_service_extras.py index 16cd76e254f..eb46cc924ea 100644 --- a/services/director/tests/unit/api/test_rest_service_extras.py +++ b/services/director/tests/unit/api/test_rest_service_extras.py @@ -8,10 +8,12 @@ import httpx from fastapi import status from fixtures.fake_services import ServiceInRegistryInfoDict +from pytest_simcore.helpers.typing_env import EnvVarsDict def _assert_response_and_unwrap_envelope(got: httpx.Response): - assert got.encoding == "application/json" + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" body = got.json() assert isinstance(body, dict) @@ -20,12 +22,14 @@ def _assert_response_and_unwrap_envelope(got: httpx.Response): async def test_get_services_extras_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str + configure_registry_access: EnvVarsDict, + client: httpx.AsyncClient, + api_version_prefix: str, ): resp = await client.get( f"/{api_version_prefix}/service_extras/whatever/someversion" ) - assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) @@ -37,6 +41,7 @@ async def test_get_services_extras_by_key_and_version_with_empty_registry( async def 
test_get_services_extras_by_key_and_version( + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -54,6 +59,6 @@ async def test_get_services_extras_by_key_and_version( assert resp.status_code == status.HTTP_200_OK, f"Got {resp.text=}" - service_extras, error = _assert_response_and_unwrap_envelope(resp.json()) + service_extras, error = _assert_response_and_unwrap_envelope(resp) assert not error assert created_service["service_extras"] == service_extras From 5d4dd417671766e337ccd677d3016a8674159e6f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 15:32:38 +0100 Subject: [PATCH 159/201] fix api --- .../src/simcore_service_director/api/rest/_service_extras.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index 0aa30808dae..a750553e191 100644 --- a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -13,7 +13,7 @@ log = logging.getLogger(__name__) -@router.get("/service_extras/{service_key}/{service_version}") +@router.get("/service_extras/{service_key:path}/{service_version}") async def list_service_extras( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, From 932af7597ac94777fa046247d1dd744e46364b01 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 15:43:33 +0100 Subject: [PATCH 160/201] fixed syntax --- .../src/simcore_service_director/api/rest/_services.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py 
b/services/director/src/simcore_service_director/api/rest/_services.py index 9e981177ba0..95b9c5d1592 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -47,7 +47,7 @@ async def list_services( ) from err -@router.get("/services/{service_key}/{service_version}") +@router.get("/services/{service_key:path}/{service_version}") async def get_service( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, @@ -75,7 +75,7 @@ async def get_service( ) from err -@router.get("/services/{service_key}/{service_version}/labels") +@router.get("/services/{service_key:path}/{service_version}/labels") async def list_service_labels( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, From 69bee43ec9d5a520a62031b6cc8db1085f78bbf2 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 15:43:45 +0100 Subject: [PATCH 161/201] ruff --- .../src/simcore_service_director/registry_proxy.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index cb661109d20..c622c0b6cb3 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -416,10 +416,11 @@ async def list_interactive_service_dependencies( if DEPENDENCIES_LABEL_KEY in image_labels: try: dependencies = json.loads(image_labels[DEPENDENCIES_LABEL_KEY]) - for dependency in dependencies: - dependency_keys.append( - {"key": dependency["key"], "tag": dependency["tag"]} - ) + dependency_keys = [ + {"key": dependency["key"], "tag": dependency["tag"]} + for dependency in dependencies + ] + except json.decoder.JSONDecodeError: logging.exception( "Incorrect json formatting in %s, skipping...", @@ -525,7 
+526,6 @@ async def get_service_extras( invalid_with_msg = f"invalid type for resource [{entry_value}]" # discrete resources (custom made ones) --- - # TODO: this could be adjusted to separate between GPU and/or VRAM # check if the service requires GPU support if not invalid_with_msg and _validate_kind(entry, "VRAM"): From bab906f688ccfae571b417d7c60512ccfe16102f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:05:32 +0100 Subject: [PATCH 162/201] missing fixture --- .../tests/unit/api/test_rest_services.py | 32 ++++++++++++------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/services/director/tests/unit/api/test_rest_services.py b/services/director/tests/unit/api/test_rest_services.py index b2db82e7902..0244bacbf27 100644 --- a/services/director/tests/unit/api/test_rest_services.py +++ b/services/director/tests/unit/api/test_rest_services.py @@ -9,10 +9,12 @@ from fastapi import status from fixtures.fake_services import ServiceInRegistryInfoDict from models_library.api_schemas_director.services import ServiceDataGet +from pytest_simcore.helpers.typing_env import EnvVarsDict def _assert_response_and_unwrap_envelope(got: httpx.Response): - assert got.encoding == "application/json" + assert got.headers["content-type"] == "application/json" + assert got.encoding == "utf-8" body = got.json() assert isinstance(body, dict) @@ -43,6 +45,7 @@ def _assert_services( async def test_list_services_with_empty_registry( docker_registry: str, + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, api_version_prefix: str, ): @@ -52,7 +55,7 @@ async def test_list_services_with_empty_registry( resp = await client.get(f"/{api_version_prefix}/services") assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not error assert 
isinstance(services, list) @@ -61,6 +64,7 @@ async def test_list_services_with_empty_registry( async def test_list_services( docker_registry: str, + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -70,7 +74,7 @@ async def test_list_services( resp = await client.get(f"/{api_version_prefix}/services") assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not error assert isinstance(services, list) @@ -79,6 +83,7 @@ async def test_list_services( async def test_get_service_bad_request( docker_registry: str, + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -87,15 +92,16 @@ async def test_get_service_bad_request( assert len(created_services) > 0 resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") - assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not services assert error async def test_list_services_by_service_type( docker_registry: str, + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -108,7 +114,7 @@ async def test_list_services_by_service_type( ) assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not error assert services assert len(services) == 3 @@ -116,17 +122,19 @@ async def 
test_list_services_by_service_type( resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not error assert services assert len(services) == 2 async def test_get_services_by_key_and_version_with_empty_registry( - client: httpx.AsyncClient, api_version_prefix: str + configure_registry_access: EnvVarsDict, + client: httpx.AsyncClient, + api_version_prefix: str, ): resp = await client.get(f"/{api_version_prefix}/services/whatever/someversion") - assert resp.status_code == status.HTTP_400_BAD_REQUEST, f"Got f{resp.text}" + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" @@ -140,6 +148,7 @@ async def test_get_services_by_key_and_version_with_empty_registry( async def test_get_services_by_key_and_version( + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -158,7 +167,7 @@ async def test_get_services_by_key_and_version( assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp.json()) + services, error = _assert_response_and_unwrap_envelope(resp) assert not error assert isinstance(services, list) assert len(services) == 1 @@ -169,6 +178,7 @@ async def test_get_services_by_key_and_version( async def test_get_service_labels( + configure_registry_access: EnvVarsDict, client: httpx.AsyncClient, created_services: list[ServiceInRegistryInfoDict], api_version_prefix: str, @@ -185,7 +195,7 @@ async def test_get_service_labels( resp = await client.get(url) assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" - labels, error = 
_assert_response_and_unwrap_envelope(resp.json()) + labels, error = _assert_response_and_unwrap_envelope(resp) assert not error assert service["docker_labels"] == labels From 22f672fca1d1311a3ca543113286a5c7d86f6983 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:32:11 +0100 Subject: [PATCH 163/201] api tests passing --- .../api/rest/_services.py | 65 ++++++++++++------- .../unit/api/test_rest_service_extras.py | 2 +- .../tests/unit/api/test_rest_services.py | 10 ++- 3 files changed, 47 insertions(+), 30 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 95b9c5d1592..927cbdfb36d 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -5,6 +5,7 @@ from models_library.generics import Envelope from models_library.services_enums import ServiceType from models_library.services_types import ServiceKey, ServiceVersion +from pydantic import BaseModel from servicelib.fastapi.dependencies import get_app from ... 
import exceptions, registry_proxy @@ -14,7 +15,24 @@ log = logging.getLogger(__name__) -@router.get("/services") +class _ErrorMessage(BaseModel): + message: str + + +@router.get( + "/services", + response_model=Envelope[list[dict[str, Any]]], + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": _ErrorMessage, + "description": "Could not connect with Docker Registry", + }, + status.HTTP_500_INTERNAL_SERVER_ERROR: { + "model": _ErrorMessage, + "description": "Unexpected error", + }, + }, +) async def list_services( the_app: Annotated[FastAPI, Depends(get_app)], service_type: ServiceType | None = None, @@ -29,11 +47,11 @@ async def list_services( services = await registry_proxy.list_services( the_app, registry_proxy.ServiceType.ALL ) - elif "computational" in service_type: + elif service_type is ServiceType.COMPUTATIONAL: services = await registry_proxy.list_services( the_app, registry_proxy.ServiceType.COMPUTATIONAL ) - elif "interactive" in service_type: + elif service_type is ServiceType.DYNAMIC: services = await registry_proxy.list_services( the_app, registry_proxy.ServiceType.DYNAMIC ) @@ -47,56 +65,57 @@ async def list_services( ) from err -@router.get("/services/{service_key:path}/{service_version}") -async def get_service( +# NOTE: be careful that /labels must be defined before the more generic get_service +@router.get("/services/{service_key:path}/{service_version}/labels") +async def list_service_labels( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -) -> Envelope[list[dict[str, Any]]]: +) -> Envelope[dict[str, Any]]: log.debug( - "Client does get_service with service_key %s, service_version %s", + "Retrieving service labels with service_key %s, service_version %s", service_key, service_version, ) try: - services = [ - await registry_proxy.get_image_details( - the_app, service_key, service_version - ) - ] - return Envelope[list[dict[str, Any]]](data=services) + service_labels, _ = await 
registry_proxy.get_image_labels( + the_app, service_key, service_version + ) + return Envelope[dict[str, Any]](data=service_labels) + except exceptions.ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err + except exceptions.RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err -@router.get("/services/{service_key:path}/{service_version}/labels") -async def list_service_labels( +@router.get("/services/{service_key:path}/{service_version}") +async def get_service( the_app: Annotated[FastAPI, Depends(get_app)], service_key: ServiceKey, service_version: ServiceVersion, -) -> Envelope[dict[str, Any]]: +) -> Envelope[list[dict[str, Any]]]: log.debug( - "Retrieving service labels with service_key %s, service_version %s", + "Client does get_service with service_key %s, service_version %s", service_key, service_version, ) try: - service_labels, _ = await registry_proxy.get_image_labels( - the_app, service_key, service_version - ) - return Envelope[dict[str, Any]](data=service_labels) - + services = [ + await registry_proxy.get_image_details( + the_app, service_key, service_version + ) + ] + return Envelope[list[dict[str, Any]]](data=services) except exceptions.ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except exceptions.RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" diff --git a/services/director/tests/unit/api/test_rest_service_extras.py b/services/director/tests/unit/api/test_rest_service_extras.py index eb46cc924ea..8b8bba037c3 100644 --- a/services/director/tests/unit/api/test_rest_service_extras.py +++ b/services/director/tests/unit/api/test_rest_service_extras.py @@ -33,7 +33,7 @@ async def test_get_services_extras_by_key_and_version_with_empty_registry( resp = await client.get( 
f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/someversion" ) - assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" + assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" resp = await client.get( f"/{api_version_prefix}/service_extras/simcore/services/dynamic/something/1.5.2" ) diff --git a/services/director/tests/unit/api/test_rest_services.py b/services/director/tests/unit/api/test_rest_services.py index 0244bacbf27..a34ec6a76d4 100644 --- a/services/director/tests/unit/api/test_rest_services.py +++ b/services/director/tests/unit/api/test_rest_services.py @@ -94,9 +94,7 @@ async def test_get_service_bad_request( resp = await client.get(f"/{api_version_prefix}/services?service_type=blahblah") assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" - services, error = _assert_response_and_unwrap_envelope(resp) - assert not services - assert error + # NOTE: only successful errors are enveloped async def test_list_services_by_service_type( @@ -119,7 +117,7 @@ async def test_list_services_by_service_type( assert services assert len(services) == 3 - resp = await client.get(f"/{api_version_prefix}/services?service_type=interactive") + resp = await client.get(f"/{api_version_prefix}/services?service_type=dynamic") assert resp.status_code == status.HTTP_200_OK, f"Got f{resp.text}" services, error = _assert_response_and_unwrap_envelope(resp) @@ -137,12 +135,12 @@ async def test_get_services_by_key_and_version_with_empty_registry( assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, f"Got f{resp.text}" resp = await client.get( - f"/{api_version_prefix}/services/simcore/services/dynamic/something/someversion" + f"/{api_version_prefix}/simcore/services/dynamic/something/someversion" ) assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" resp = await client.get( - 
f"/{api_version_prefix}/services/simcore/services/dynamic/something/1.5.2" + f"/{api_version_prefix}/simcore/services/dynamic/something/1.5.2" ) assert resp.status_code == status.HTTP_404_NOT_FOUND, f"Got f{resp.text}" From 207d33217333afa11517fc214e6b1f2bfacfe10e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:35:25 +0100 Subject: [PATCH 164/201] fixture adjustments --- services/director/tests/unit/test_producer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 53f80d1f0ad..4e935c73b6a 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -221,11 +221,11 @@ async def test_service_assigned_env_variables( assert "STORAGE_ENDPOINT" in envs_dict assert "SIMCORE_USER_ID" in envs_dict - assert envs_dict["SIMCORE_USER_ID"] == user_id + assert envs_dict["SIMCORE_USER_ID"] == f"{user_id}" assert "SIMCORE_NODE_UUID" in envs_dict assert envs_dict["SIMCORE_NODE_UUID"] == service_uuid assert "SIMCORE_PROJECT_ID" in envs_dict - assert envs_dict["SIMCORE_PROJECT_ID"] == project_id + assert envs_dict["SIMCORE_PROJECT_ID"] == f"{project_id}" assert "SIMCORE_NODE_BASEPATH" in envs_dict assert envs_dict["SIMCORE_NODE_BASEPATH"] == service["service_basepath"] assert "SIMCORE_HOST_NAME" in envs_dict From 3d46690eae9471601bbf0fad14fc934f231f1392 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:45:23 +0100 Subject: [PATCH 165/201] test passing --- services/director/tests/unit/test_producer.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 4e935c73b6a..96ad487fcfa 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ 
-185,6 +185,7 @@ async def test_find_service_tag(): async def test_start_stop_service( + configure_registry_access: EnvVarsDict, configured_docker_network: EnvVarsDict, run_services: Callable[..., Awaitable[list[dict[str, Any]]]], ): @@ -193,6 +194,7 @@ async def test_start_stop_service( async def test_service_assigned_env_variables( + configure_registry_access: EnvVarsDict, configured_docker_network: EnvVarsDict, run_services: Callable[..., Awaitable[list[dict[str, Any]]]], user_id: UserID, @@ -236,6 +238,7 @@ async def test_service_assigned_env_variables( async def test_interactive_service_published_port( + configure_registry_access: EnvVarsDict, configured_docker_network: EnvVarsDict, run_services, ): @@ -264,6 +267,7 @@ async def test_interactive_service_published_port( async def test_interactive_service_in_correct_network( + configure_registry_access: EnvVarsDict, with_docker_network: dict[str, Any], configured_docker_network: EnvVarsDict, run_services, @@ -288,6 +292,7 @@ async def test_interactive_service_in_correct_network( async def test_dependent_services_have_common_network( + configure_registry_access: EnvVarsDict, configured_docker_network: EnvVarsDict, run_services, ): @@ -339,6 +344,7 @@ def registry_settings(app_settings: ApplicationSettings) -> RegistrySettings: ], ) async def test_get_service_key_version_from_docker_service( + configure_registry_access: EnvVarsDict, registry_settings: RegistrySettings, fake_service: FakeDockerService, ): @@ -371,6 +377,7 @@ async def test_get_service_key_version_from_docker_service( ], ) async def test_get_service_key_version_from_docker_service_except_invalid_keys( + configure_registry_access: EnvVarsDict, registry_settings: RegistrySettings, fake_service_str: str, ): From a11a5176c9ea41ceaa99ed197ba61b283d690cd3 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:53:35 +0100 Subject: [PATCH 166/201] passing tests --- 
.../tests/unit/test_registry_proxy.py | 59 +++++-------------- 1 file changed, 15 insertions(+), 44 deletions(-) diff --git a/services/director/tests/unit/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py index 566d46a5b91..31138fac03d 100644 --- a/services/director/tests/unit/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -6,13 +6,13 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director import registry_proxy async def test_list_no_services_available( + configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, - configure_registry_access, ): computational_services = await registry_proxy.list_services( @@ -29,38 +29,10 @@ async def test_list_no_services_available( assert not all_services -async def test_list_services_with_bad_json_formatting( - app: FastAPI, - docker_registry, - configure_registry_access, - push_services, -): - # some services - created_services = await push_services( - number_of_computational_services=3, - number_of_interactive_services=2, - bad_json_format=True, - ) - assert len(created_services) == 5 - computational_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.COMPUTATIONAL - ) - assert not computational_services # it's empty - interactive_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.DYNAMIC - ) - assert not interactive_services - all_services = await registry_proxy.list_services( - app, registry_proxy.ServiceType.ALL - ) - assert not all_services - - async def test_list_computational_services( + configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, push_services, - configure_registry_access, ): await push_services( number_of_computational_services=6, number_of_interactive_services=3 @@ -73,10 +45,9 @@ async def test_list_computational_services( async def test_list_interactive_services( + 
configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, push_services, - configure_registry_access, ): await push_services( number_of_computational_services=5, number_of_interactive_services=4 @@ -88,10 +59,9 @@ async def test_list_interactive_services( async def test_list_of_image_tags( + configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=5, number_of_interactive_services=3 @@ -110,10 +80,9 @@ async def test_list_of_image_tags( async def test_list_interactive_service_dependencies( + configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=2, @@ -141,10 +110,9 @@ async def test_list_interactive_service_dependencies( async def test_get_image_labels( + configure_registry_access: EnvVarsDict, app: FastAPI, - docker_registry, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -215,9 +183,9 @@ def test_get_service_last_namess(): async def test_get_image_details( + configure_registry_access: EnvVarsDict, app: FastAPI, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -234,9 +202,9 @@ async def test_get_image_details( async def test_registry_caching( + configure_registry_access: EnvVarsDict, app: FastAPI, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 @@ -255,7 +223,10 @@ async def test_registry_caching( @pytest.mark.skip(reason="test needs credentials to real registry") -async def test_get_services_performance(app, loop, configure_custom_registry): +async def test_get_services_performance( + 
configure_registry_access: EnvVarsDict, + app: FastAPI, +): start_time = time.perf_counter() services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) stop_time = time.perf_counter() @@ -265,9 +236,9 @@ async def test_get_services_performance(app, loop, configure_custom_registry): async def test_generate_service_extras( - app, + configure_registry_access: EnvVarsDict, + app: FastAPI, push_services, - configure_registry_access, ): images = await push_services( number_of_computational_services=1, number_of_interactive_services=1 From 4b58480f44e83499436a68b75a5e93cb6ff6892b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 17:59:48 +0100 Subject: [PATCH 167/201] use docker utils --- .../src/simcore_service_director/constants.py | 2 + .../src/simcore_service_director/producer.py | 6 +-- .../src/simcore_service_director/utils.py | 29 -------------- services/director/tests/unit/test_utils.py | 39 ------------------- 4 files changed, 4 insertions(+), 72 deletions(-) delete mode 100644 services/director/src/simcore_service_director/utils.py delete mode 100644 services/director/tests/unit/test_utils.py diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py index 291d073a70a..9ce6ea81a44 100644 --- a/services/director/src/simcore_service_director/constants.py +++ b/services/director/src/simcore_service_director/constants.py @@ -24,3 +24,5 @@ # TO remove # used when in devel mode vs release mode NODE_SCHEMA_LOCATION: Final[str] = f"{API_ROOT}/v0/schemas/node-meta-v0.0.1.json" + +DATETIME_FORMAT: Final[str] = "%Y-%m-%dT%H:%M:%S.%f" diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index d246a919fb1..a6d503e4bf9 100644 --- a/services/director/src/simcore_service_director/producer.py +++ 
b/services/director/src/simcore_service_director/producer.py @@ -25,6 +25,7 @@ from fastapi import FastAPI from packaging.version import Version from servicelib.async_utils import run_sequentially_in_context +from servicelib.docker_utils import to_datetime from settings_library.docker_registry import RegistrySettings from tenacity import retry from tenacity.retry import retry_if_exception_type @@ -45,7 +46,6 @@ from .instrumentation import get_instrumentation from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw -from .utils import parse_as_datetime log = logging.getLogger(__name__) @@ -702,9 +702,7 @@ async def _get_service_state( elif task_state in ("running"): now = arrow.utcnow().datetime # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' - task_state_update_time = parse_as_datetime( - last_task["Status"]["Timestamp"], default=now - ) + task_state_update_time = to_datetime(last_task["Status"]["Timestamp"]) time_since_running = now - task_state_update_time log.debug("Now is %s, time since running mode is %s", now, time_since_running) diff --git a/services/director/src/simcore_service_director/utils.py b/services/director/src/simcore_service_director/utils.py deleted file mode 100644 index ad07d27d1b3..00000000000 --- a/services/director/src/simcore_service_director/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging -from datetime import datetime - -import arrow - -log = logging.getLogger(__name__) - -DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" -_MAXLEN = len("2020-10-09T12:28:14.7710") - - -def parse_as_datetime(timestr: str, *, default: datetime | None = None) -> datetime: - """ - default: if parsing is not possible, it returs default - - """ - # datetime_str is typically '2020-10-09T12:28:14.771034099Z' - # - The T separates the date portion from the time-of-day portion - # - The Z on the end means UTC, that is, an offset-from-UTC - # The 099 before the Z is not 
clear, therefore we will truncate the last part - - try: - return arrow.get(timestr).datetime - - except ValueError as err: - log.debug("Failed to parse %s: %s", timestr, err) - if default is not None: - return default - raise diff --git a/services/director/tests/unit/test_utils.py b/services/director/tests/unit/test_utils.py deleted file mode 100644 index c9126ca0a4d..00000000000 --- a/services/director/tests/unit/test_utils.py +++ /dev/null @@ -1,39 +0,0 @@ -from datetime import datetime - -import arrow -import pytest -from simcore_service_director.utils import parse_as_datetime - - -@pytest.mark.parametrize( - "timestr", - [ - # Samples taken from https://docs.docker.com/engine/reference/commandline/service_inspect/ - "2020-10-09T18:44:02.558012087Z", - "2020-10-09T12:28:14.771034099Z", - "2020-10-09T12:28:14.7710", - # found cases with spaces - "2020-10-09T12:28:14.77 Z", - " 2020-10-09T12:28:14.77 ", - ], -) -def test_parse_valid_time_strings(timestr): - - dt = parse_as_datetime(timestr) - assert isinstance(dt, datetime) - assert dt.year == 2020 - assert dt.month == 10 - assert dt.day == 9 - - -def test_parse_invalid_timestr(): - now = arrow.utcnow().datetime - invalid_timestr = "2020-10-09T12:28" - - # w/ default, it should NOT raise - dt = parse_as_datetime(invalid_timestr, default=now) - assert dt == now - - # w/o default - with pytest.raises(ValueError): - parse_as_datetime(invalid_timestr) From 5de6955d2f04375f44e5cfc5f7f9e09653f37a33 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:01:05 +0100 Subject: [PATCH 168/201] test with no swarm is not necessary --- .../director/tests/unit/test_docker_utils.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/services/director/tests/unit/test_docker_utils.py b/services/director/tests/unit/test_docker_utils.py index c4588620ffc..498199108c9 100644 --- a/services/director/tests/unit/test_docker_utils.py +++ 
b/services/director/tests/unit/test_docker_utils.py @@ -6,8 +6,6 @@ from asyncio import sleep from collections.abc import Callable -import pytest -from aiodocker.exceptions import DockerError from simcore_service_director import docker_utils @@ -30,21 +28,6 @@ async def test_docker_client(): await container.delete(force=True) -@pytest.mark.parametrize( - "fct", - [ - (docker_utils.swarm_get_number_nodes), - (docker_utils.swarm_has_manager_nodes), - (docker_utils.swarm_has_worker_nodes), - ], -) -async def test_swarm_method_with_no_swarm(fct: Callable): - # if this fails on your development machine run - # `docker swarm leave --force` to leave the swarm - with pytest.raises(DockerError): - await fct() - - async def test_swarm_get_number_nodes(docker_swarm: None): num_nodes = await docker_utils.swarm_get_number_nodes() assert num_nodes == 1 From 7b7afd4af814c21a94ebf161417ddc73f0bc7ed7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:01:45 +0100 Subject: [PATCH 169/201] no need to test fixtures --- services/director/tests/unit/test_docker_utils.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/services/director/tests/unit/test_docker_utils.py b/services/director/tests/unit/test_docker_utils.py index 498199108c9..81ad8299f31 100644 --- a/services/director/tests/unit/test_docker_utils.py +++ b/services/director/tests/unit/test_docker_utils.py @@ -4,7 +4,6 @@ # pylint:disable=too-many-arguments # pylint: disable=not-async-context-manager from asyncio import sleep -from collections.abc import Callable from simcore_service_director import docker_utils @@ -39,12 +38,3 @@ async def test_swarm_has_manager_nodes(docker_swarm: None): async def test_swarm_has_worker_nodes(docker_swarm: None): assert (await docker_utils.swarm_has_worker_nodes()) is False - - -async def test_push_services( - push_services: Callable, - configure_registry_access: None, -): - await push_services( - 
number_of_computational_services=3, number_of_interactive_services=3 - ) From 8bf9f8da6e28c6404fc4971c02138088ec5d35fb Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:03:40 +0100 Subject: [PATCH 170/201] cleanup --- services/director/tests/helpers/__init__.py | 0 .../tests/helpers/json_schema_validator.py | 29 -------- .../tests/unit/test_dummy_services.py | 22 ------ .../director/tests/unit/test_json_schemas.py | 29 -------- services/director/tests/unit/test_oas.py | 20 ------ .../tests/unit/test_registry_cache_task.py | 67 ------------------- 6 files changed, 167 deletions(-) delete mode 100644 services/director/tests/helpers/__init__.py delete mode 100644 services/director/tests/helpers/json_schema_validator.py delete mode 100644 services/director/tests/unit/test_dummy_services.py delete mode 100644 services/director/tests/unit/test_json_schemas.py delete mode 100644 services/director/tests/unit/test_oas.py delete mode 100644 services/director/tests/unit/test_registry_cache_task.py diff --git a/services/director/tests/helpers/__init__.py b/services/director/tests/helpers/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/director/tests/helpers/json_schema_validator.py b/services/director/tests/helpers/json_schema_validator.py deleted file mode 100644 index 25088a192b8..00000000000 --- a/services/director/tests/helpers/json_schema_validator.py +++ /dev/null @@ -1,29 +0,0 @@ -import json -import logging -from pathlib import Path - -# NOTE: currently uses draft04 version -from jsonschema import SchemaError, ValidationError, validate - -_logger = logging.getLogger(__name__) - - -def validate_instance_object(json_instance: dict, json_schema: dict): - try: - validate(json_instance, json_schema) - except ValidationError: - _logger.exception("Node validation error:") - raise - except SchemaError: - _logger.exception("Schema validation error:") - raise - - -def 
validate_instance_path(json_instance: Path, json_schema: Path): - with json_instance.open() as file_pointer: - instance = json.load(file_pointer) - - with json_schema.open() as file_pointer: - schema = json.load(file_pointer) - - validate_instance_object(instance, schema) diff --git a/services/director/tests/unit/test_dummy_services.py b/services/director/tests/unit/test_dummy_services.py deleted file mode 100644 index 255d563e6c0..00000000000 --- a/services/director/tests/unit/test_dummy_services.py +++ /dev/null @@ -1,22 +0,0 @@ -# pylint: disable=bare-except -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-import - - -# from helpers import json_schema_validator - -# log = logging.getLogger(__name__) - - -# async def test_services_conformity(configure_schemas_location, push_services): -# from simcore_service_director import resources - -# services = await push_services(1, 1) -# with resources.stream(resources.RESOURCE_NODE_SCHEMA) as file_pt: -# service_schema = json.load(file_pt) -# for service in services: -# # validate service -# json_schema_validator.validate_instance_object( -# service["service_description"], service_schema -# ) diff --git a/services/director/tests/unit/test_json_schemas.py b/services/director/tests/unit/test_json_schemas.py deleted file mode 100644 index 37d68c62f09..00000000000 --- a/services/director/tests/unit/test_json_schemas.py +++ /dev/null @@ -1,29 +0,0 @@ -import json -from pathlib import Path - -import pytest -from jsonschema import SchemaError, ValidationError, validate -from simcore_service_director import resources - -API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) - - -def validate_individual_schemas(list_of_paths): - for spec_file_path in list_of_paths: - assert spec_file_path.exists() - with spec_file_path.open() as file_ptr: - schema_specs = json.load(file_ptr) - try: - dummy_instance = {} - with pytest.raises(ValidationError): - validate(dummy_instance, 
schema_specs) - except SchemaError as err: - pytest.fail(err.message) - - -@pytest.mark.parametrize("version", API_VERSIONS) -def test_valid_individual_json_schemas_specs(version): - name = f"{resources.RESOURCE_OPENAPI_ROOT}/{version}/schemas" - schemas_folder_path = resources.get_path(name) - - validate_individual_schemas(Path(schemas_folder_path).rglob("*.json")) diff --git a/services/director/tests/unit/test_oas.py b/services/director/tests/unit/test_oas.py deleted file mode 100644 index 186f3819737..00000000000 --- a/services/director/tests/unit/test_oas.py +++ /dev/null @@ -1,20 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -import yaml -from simcore_service_director import resources - - -def test_server_specs(): - with resources.stream(resources.RESOURCE_OPEN_API) as fh: - specs = yaml.safe_load(fh) - - # client-sdk current limitation - # - hooks to first server listed in oas - default_server = specs["servers"][0] - assert ( - default_server["url"] == "http://{host}:{port}/{version}" - ), "Invalid convention" diff --git a/services/director/tests/unit/test_registry_cache_task.py b/services/director/tests/unit/test_registry_cache_task.py deleted file mode 100644 index 6d611e92a32..00000000000 --- a/services/director/tests/unit/test_registry_cache_task.py +++ /dev/null @@ -1,67 +0,0 @@ -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - - -# TODO: replace with aiocache - done already -# from simcore_service_director import config, main, registry_cache_task, registry_proxy - - -# @pytest.fixture -# def client( -# loop, -# aiohttp_client, -# aiohttp_unused_port, -# configure_schemas_location, -# configure_registry_access, -# ): -# config.DIRECTOR_REGISTRY_CACHING = True -# config.DIRECTOR_REGISTRY_CACHING_TTL = 5 -# # config.DIRECTOR_REGISTRY_CACHING_TTL = 5 -# app = main.setup_app() -# server_kwargs = {"port": 
aiohttp_unused_port(), "host": "localhost"} - -# registry_cache_task.setup(app) - -# return loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) - - -# async def test_registry_caching_task(client, push_services): -# app = client.app -# assert app - -# # check the task is started -# assert registry_cache_task.TASK_NAME in app -# # check the registry cache is empty (no calls yet) -# assert registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY in app - -# # check we do not get any repository -# list_of_services = await registry_proxy.list_services( -# app, registry_proxy.ServiceType.ALL -# ) -# assert not list_of_services -# assert app[registry_cache_task.APP_REGISTRY_CACHE_DATA_KEY] != {} -# # create services in the registry -# pushed_services = await push_services( -# number_of_computational_services=1, number_of_interactive_services=1 -# ) -# # the services shall be updated -# await sleep( -# config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 -# ) # NOTE: this can take some time. Sleep increased by 10%. -# list_of_services = await registry_proxy.list_services( -# app, registry_proxy.ServiceType.ALL -# ) -# assert len(list_of_services) == 2 -# # add more -# pushed_services = await push_services( -# number_of_computational_services=2, -# number_of_interactive_services=2, -# version="2.0.", -# ) -# await sleep( -# config.DIRECTOR_REGISTRY_CACHING_TTL * 1.1 -# ) # NOTE: this sometimes takes a bit more. Sleep increased a 10%. 
-# list_of_services = await registry_proxy.list_services( -# app, registry_proxy.ServiceType.ALL -# ) -# assert len(list_of_services) == len(pushed_services) From 307d95ba96e9d61b2b9d1692f941555b16ab686b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:05:48 +0100 Subject: [PATCH 171/201] cleanup --- services/director/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/director/setup.py b/services/director/setup.py index 4d8fdfcc7e6..4522abb4e1d 100644 --- a/services/director/setup.py +++ b/services/director/setup.py @@ -58,8 +58,8 @@ def read_reqs(reqs_path: Path) -> set[str]: "extras_require": {"test": TEST_REQUIREMENTS}, "entry_points": { "console_scripts": [ - "simcore-service-director = simcore_service_director.__main__:main", - "simcore-service = simcore_service_director.__main__:main", + "simcore-service-director = simcore_service_director.cli:main", + "simcore-service = simcore_service_director.cli:main", ], }, } From 2606d9e1dc264bc095e818620b83d14852183f90 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:08:34 +0100 Subject: [PATCH 172/201] unused --- .../src/simcore_service_director/resources.py | 36 ------------------- 1 file changed, 36 deletions(-) delete mode 100644 services/director/src/simcore_service_director/resources.py diff --git a/services/director/src/simcore_service_director/resources.py b/services/director/src/simcore_service_director/resources.py deleted file mode 100644 index d1fd28bd513..00000000000 --- a/services/director/src/simcore_service_director/resources.py +++ /dev/null @@ -1,36 +0,0 @@ -import functools -from pathlib import Path - -import pkg_resources - -from .constants import NODE_SCHEMA_LOCATION - -RESOURCE_OPENAPI_ROOT: str = "api" -RESOURCE_OPEN_API: str = f"{RESOURCE_OPENAPI_ROOT}/v0/openapi.yaml" -RESOURCE_NODE_SCHEMA: str = NODE_SCHEMA_LOCATION - -""" - List of 
pkg_resources functions *bound* to current package with the following signature - - function(resource_name) - - Note that resource names must be /-separated paths and - cannot be absolute (i.e. no leading /) or contain relative names like "..". - Do not use os.path routines to manipulate resource paths, as they are not filesystem paths. - - Resources are read/only files/folders -""" -exists = functools.partial(pkg_resources.resource_exists, __name__) -stream = functools.partial(pkg_resources.resource_stream, __name__) -listdir = functools.partial(pkg_resources.resource_listdir, __name__) -isdir = functools.partial(pkg_resources.resource_isdir, __name__) - - -def get_path(resource_name: str) -> Path: - """Returns a path to a resource - - WARNING: existence of file is not guaranteed. Use resources.exists - WARNING: resource files are supposed to be used as read-only! - """ - resource_path = Path(pkg_resources.resource_filename(__name__, resource_name)) - return resource_path From 57b5315a2efca35aa5e44f3b2d0abd6b82a3306b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:14:47 +0100 Subject: [PATCH 173/201] chatgpt tests --- .../director/tests/unit/test_system_utils.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 services/director/tests/unit/test_system_utils.py diff --git a/services/director/tests/unit/test_system_utils.py b/services/director/tests/unit/test_system_utils.py new file mode 100644 index 00000000000..522549cf561 --- /dev/null +++ b/services/director/tests/unit/test_system_utils.py @@ -0,0 +1,46 @@ +from unittest.mock import mock_open, patch + +from simcore_service_director.system_utils import get_system_extra_hosts_raw + + +# Sample tests +def test_get_system_extra_hosts_raw_with_matching_domain(): + # Simulate the contents of /etc/hosts + mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" + extra_host_domain = "example.com" + + with 
patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( + "pathlib.Path.exists", return_value=True + ): + result = get_system_extra_hosts_raw(extra_host_domain) + assert result == ["192.168.1.1 example.com"] + + +def test_get_system_extra_hosts_raw_with_no_matching_domain(): + mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" + extra_host_domain = "nonexistent.com" + + with patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( + "pathlib.Path.exists", return_value=True + ): + result = get_system_extra_hosts_raw(extra_host_domain) + assert result == [] + + +def test_get_system_extra_hosts_raw_with_undefined_domain(): + mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" + extra_host_domain = "undefined" + + with patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( + "pathlib.Path.exists", return_value=True + ): + result = get_system_extra_hosts_raw(extra_host_domain) + assert result == [] + + +def test_get_system_extra_hosts_raw_with_no_hosts_file(): + extra_host_domain = "example.com" + + with patch("pathlib.Path.exists", return_value=False): + result = get_system_extra_hosts_raw(extra_host_domain) + assert result == [] From 0457aaa9c061a4d586a74fea4394aa3762b1b3ec Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:43:33 +0100 Subject: [PATCH 174/201] removed exceptions, replaced by errors --- .../api/rest/_running_interactive_services.py | 18 ++- .../api/rest/_service_extras.py | 7 +- .../api/rest/_services.py | 13 ++- .../simcore_service_director/core/errors.py | 30 ++++- .../simcore_service_director/exceptions.py | 87 --------------- .../src/simcore_service_director/producer.py | 104 +++++++++--------- .../registry_proxy.py | 32 +++--- services/director/tests/unit/test_producer.py | 17 ++- 8 files changed, 134 insertions(+), 174 deletions(-) delete mode 100644 
services/director/src/simcore_service_director/exceptions.py diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index ae3a3c63670..e79d4f70f8f 100644 --- a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -10,7 +10,13 @@ from models_library.users import UserID from servicelib.fastapi.dependencies import get_app -from ... import exceptions, producer +from ... import producer +from ...core.errors import ( + RegistryConnectionError, + ServiceNotAvailableError, + ServiceUUIDInUseError, + ServiceUUIDNotFoundError, +) router = APIRouter() @@ -72,15 +78,15 @@ async def start_service( x_simcore_user_agent, ) return Envelope[dict[str, Any]](data=service) - except exceptions.ServiceNotAvailableError as err: + except ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except exceptions.ServiceUUIDInUseError as err: + except ServiceUUIDInUseError as err: raise HTTPException( status_code=status.HTTP_409_CONFLICT, detail=f"{err}" ) from err - except exceptions.RegistryConnectionError as err: + except RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err @@ -98,7 +104,7 @@ async def get_running_service( try: service = await producer.get_service_details(the_app, f"{service_uuid}") return Envelope[dict[str, Any]](data=service) - except exceptions.ServiceUUIDNotFoundError as err: + except ServiceUUIDNotFoundError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err @@ -122,7 +128,7 @@ async def stop_service( the_app, node_uuid=f"{service_uuid}", save_state=save_state ) - except exceptions.ServiceUUIDNotFoundError as err: + except 
ServiceUUIDNotFoundError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index a750553e191..5d1d069bb09 100644 --- a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -6,7 +6,8 @@ from models_library.services_types import ServiceKey, ServiceVersion from servicelib.fastapi.dependencies import get_app -from ... import exceptions, registry_proxy +from ... import registry_proxy +from ...core.errors import RegistryConnectionError, ServiceNotAvailableError router = APIRouter() @@ -29,11 +30,11 @@ async def list_service_extras( the_app, service_key, service_version ) return Envelope[dict[str, Any]](data=service_extras) - except exceptions.ServiceNotAvailableError as err: + except ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except exceptions.RegistryConnectionError as err: + except RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index 927cbdfb36d..d9b2799a5dd 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -8,7 +8,8 @@ from pydantic import BaseModel from servicelib.fastapi.dependencies import get_app -from ... import exceptions, registry_proxy +from ... 
import registry_proxy +from ...core.errors import RegistryConnectionError, ServiceNotAvailableError router = APIRouter() @@ -59,7 +60,7 @@ async def list_services( # NOTE2: the catalog will directly talk to the registry see case #2165 [https://github.com/ITISFoundation/osparc-simcore/issues/2165] # services = node_validator.validate_nodes(services) return Envelope[list[dict[str, Any]]](data=services) - except exceptions.RegistryConnectionError as err: + except RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err @@ -83,12 +84,12 @@ async def list_service_labels( ) return Envelope[dict[str, Any]](data=service_labels) - except exceptions.ServiceNotAvailableError as err: + except ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except exceptions.RegistryConnectionError as err: + except RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err @@ -112,11 +113,11 @@ async def get_service( ) ] return Envelope[list[dict[str, Any]]](data=services) - except exceptions.ServiceNotAvailableError as err: + except ServiceNotAvailableError as err: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"{err}" ) from err - except exceptions.RegistryConnectionError as err: + except RegistryConnectionError as err: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail=f"{err}" ) from err diff --git a/services/director/src/simcore_service_director/core/errors.py b/services/director/src/simcore_service_director/core/errors.py index 2664f266da7..ebbf885451b 100644 --- a/services/director/src/simcore_service_director/core/errors.py +++ b/services/director/src/simcore_service_director/core/errors.py @@ -7,8 +7,36 @@ class DirectorRuntimeError(OsparcErrorMixin, RuntimeError): def __init__(self, **ctx: Any) -> None: super().__init__(**ctx) - msg_template: str 
= "Director-v0 unexpected error" + msg_template: str = "Director-v0 unexpected error: {msg}" class ConfigurationError(DirectorRuntimeError): msg_template: str = "Application misconfiguration: {msg}" + + +class GenericDockerError(DirectorRuntimeError): + msg_template: str = "Docker error: {err}" + + +class ServiceNotAvailableError(DirectorRuntimeError): + msg_template: str = "Service {service_name}:{service_tag} is not available" + + +class ServiceUUIDNotFoundError(DirectorRuntimeError): + msg_template: str = "Service with uuid {service_uuid} was not found" + + +class ServiceUUIDInUseError(DirectorRuntimeError): + msg_template: str = "Service with uuid {service_uuid} is already in use" + + +class ServiceStateSaveError(DirectorRuntimeError): + msg_template: str = "Failed to save state of service {service_uuid}: {reason}" + + +class RegistryConnectionError(DirectorRuntimeError): + msg_template: str = "Unexpected connection error while accessing registry: {msg}" + + +class ServiceStartTimeoutError(DirectorRuntimeError): + msg_template: str = "Service {service_name}:{service_uuid} failed to start in time" diff --git a/services/director/src/simcore_service_director/exceptions.py b/services/director/src/simcore_service_director/exceptions.py deleted file mode 100644 index cdb25145cb2..00000000000 --- a/services/director/src/simcore_service_director/exceptions.py +++ /dev/null @@ -1,87 +0,0 @@ -""" Defines the different exceptions that may arise in the director - - -TODO: Exceptions should provide all info to create Error instances of the API model -For instance, assume there is a ficticious exception class FieldValidationError, then it would -translate into something like - -// response - 422 -{ - "error": { - "status": 422, - "error": "FIELDS_VALIDATION_ERROR", - "description": "One or more fields raised validation errors." - "fields": { - "email": "Invalid email address.", - "password": "Password too short." 
- } - } -} -""" - -from typing import Optional - -from aiodocker.exceptions import DockerError - - -class DirectorException(Exception): - """Basic exception""" - - def __init__(self, msg: Optional[str] = None): - super().__init__(msg or "Unexpected error was triggered") - - -class GenericDockerError(DirectorException): - """Generic docker library error""" - - def __init__(self, msg: str, original_exception: DockerError): - super().__init__(msg + f": {original_exception.message}") - self.original_exception = original_exception - - -class ServiceNotAvailableError(DirectorException): - """Service not found""" - - def __init__(self, service_name: str, service_tag: Optional[str] = None): - service_tag = service_tag or "UNDEFINED" - super().__init__(f"The service {service_name}:{service_tag} does not exist") - self.service_name = service_name - self.service_tag = service_tag - - -class ServiceUUIDNotFoundError(DirectorException): - """Service not found""" - - def __init__(self, service_uuid: str): - super().__init__(f"The service with uuid {service_uuid} was not found") - self.service_uuid = service_uuid - - -class ServiceUUIDInUseError(DirectorException): - """Service UUID is already in use""" - - def __init__(self, service_uuid: str): - super().__init__(f"The service uuid {service_uuid} is already in use") - self.service_uuid = service_uuid - - -class ServiceStateSaveError(DirectorException): - def __init__(self, service_uuid: str, reason: str): - super().__init__(f"Failed to save state of service {service_uuid}: {reason}") - self.service_uuid = service_uuid - - -class RegistryConnectionError(DirectorException): - """Error while connecting to the docker regitry""" - - def __init__(self, msg: str): - super().__init__(msg or "Unexpected connection error while accessing registry") - - -class ServiceStartTimeoutError(DirectorException): - """The service was created but never run (time-out)""" - - def __init__(self, service_name: str, service_uuid: str): - 
super().__init__(f"Service {service_name}:{service_uuid} failed to start ") - self.service_name = service_name - self.service_uuid = service_uuid diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index a6d503e4bf9..2fbbf371054 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -32,7 +32,7 @@ from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed -from . import docker_utils, exceptions, registry_proxy +from . import docker_utils, registry_proxy from .client_session import get_client_session from .constants import ( CPU_RESOURCE_LIMIT_KEY, @@ -41,8 +41,16 @@ SERVICE_RUNTIME_BOOTSETTINGS, SERVICE_RUNTIME_SETTINGS, ) +from .core.errors import ( + DirectorRuntimeError, + GenericDockerError, + ServiceNotAvailableError, + ServiceStartTimeoutError, + ServiceStateSaveError, + ServiceUUIDInUseError, + ServiceUUIDNotFoundError, +) from .core.settings import ApplicationSettings, get_application_settings -from .exceptions import ServiceStateSaveError from .instrumentation import get_instrumentation from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw @@ -78,19 +86,18 @@ async def _check_node_uuid_available( "label": f"{_to_simcore_runtime_docker_label_key('node_id')}={node_uuid}" } ) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while retrieving services list") + except aiodocker.DockerError as err: msg = "Error while retrieving services" - raise exceptions.GenericDockerError(msg, err) from err + raise GenericDockerError(err=msg) from err if list_of_running_services_w_uuid: - raise exceptions.ServiceUUIDInUseError(node_uuid) + raise ServiceUUIDInUseError(service_uuid=node_uuid) log.debug("UUID %s is free", node_uuid) def _check_setting_correctness(setting: dict) -> None: if "name" not in setting or "type" 
not in setting or "value" not in setting: msg = f"Invalid setting in {setting}" - raise exceptions.DirectorException(msg) + raise DirectorRuntimeError(msg=msg) def _parse_mount_settings(settings: list[dict]) -> list[dict]: @@ -516,7 +523,7 @@ async def _get_swarm_network( if "swarm" in x["Scope"] and network_name in x["Name"] ] if not networks or len(networks) > 1: - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg=( "Swarm network name is not configured, found following networks " "(if there is more then 1 network, remove the one which has no " @@ -614,10 +621,9 @@ async def _create_overlay_network_in_swarm( node_uuid, ) return cast(str, docker_network.id) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while creating network for service %s", service_name) + except aiodocker.DockerError as err: msg = "Error while creating network" - raise exceptions.GenericDockerError(msg, err) from err + raise GenericDockerError(err=msg) from err async def _remove_overlay_network_of_swarm( @@ -640,12 +646,9 @@ async def _remove_overlay_network_of_swarm( docker_network = aiodocker.networks.DockerNetwork(client, network["Id"]) await docker_network.delete() log.debug("Removed %s networks with uuid %s", len(networks), node_uuid) - except aiodocker.exceptions.DockerError as err: - log.exception( - "Error while removing networks for service with uuid: %s", node_uuid - ) + except aiodocker.DockerError as err: msg = "Error while removing networks" - raise exceptions.GenericDockerError(msg, err) from err + raise GenericDockerError(err=msg) from err async def _get_service_state( @@ -738,7 +741,9 @@ async def _wait_until_service_running_or_failed( log.error( "Error while waiting for service with %s", last_task["Status"] ) - raise exceptions.ServiceStartTimeoutError(service_name, node_uuid) + raise ServiceStartTimeoutError( + service_name=service_name, service_uuid=node_uuid + ) if task_state in ("running", "complete"): break # allows dealing 
with other events instead of wasting time here @@ -753,7 +758,7 @@ async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[ } log.debug("entries %s", list_of_images) if not list_of_images[service_key]: - raise exceptions.ServiceNotAvailableError(service_key) + raise ServiceNotAvailableError(service_name=service_key) log.debug( "Service %s has the following list of images available: %s", @@ -790,7 +795,7 @@ async def _find_service_tag( list_of_images: dict, service_key: str, service_tag: str | None ) -> str: if service_key not in list_of_images: - raise exceptions.ServiceNotAvailableError( + raise ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) # filter incorrect chars @@ -799,13 +804,15 @@ async def _find_service_tag( available_tags_list = sorted(filtered_tags_list, key=Version) # not tags available... probably an undefined service there... if not available_tags_list: - raise exceptions.ServiceNotAvailableError(service_key, service_tag) + raise ServiceNotAvailableError( + service_name=service_key, service_tag=service_tag + ) tag = service_tag if not service_tag or service_tag == "latest": # get latest tag tag = available_tags_list[len(available_tags_list) - 1] elif available_tags_list.count(service_tag) != 1: - raise exceptions.ServiceNotAvailableError( + raise ServiceNotAvailableError( service_name=service_key, service_tag=service_tag ) @@ -854,7 +861,7 @@ async def _start_docker_service( if "ID" not in service: # error while starting service msg = f"Error while starting service: {service!s}" - raise exceptions.DirectorException(msg) + raise DirectorRuntimeError(msg=msg) log.debug("Service started now waiting for it to run") # get the full info from docker @@ -903,18 +910,20 @@ async def _start_docker_service( "project_id": project_id, } - except exceptions.ServiceStartTimeoutError: + except ServiceStartTimeoutError: log.exception("Service failed to start") await _silent_service_cleanup(app, node_uuid) raise - 
except aiodocker.exceptions.DockerError as err: + except aiodocker.DockerError as err: log.exception("Unexpected error") await _silent_service_cleanup(app, node_uuid) - raise exceptions.ServiceNotAvailableError(service_key, service_tag) from err + raise ServiceNotAvailableError( + service_name=service_key, service_tag=service_tag + ) from err async def _silent_service_cleanup(app: FastAPI, node_uuid: str) -> None: - with contextlib.suppress(exceptions.DirectorException): + with contextlib.suppress(DirectorRuntimeError): await stop_service(app, node_uuid=node_uuid, save_state=False) @@ -971,7 +980,7 @@ async def _get_service_key_version_from_docker_service( ) -> tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) if not service_full_name.startswith(registry_settings.resolved_registry_url): - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg=f"Invalid service '{service_full_name}', it is missing {registry_settings.resolved_registry_url}" ) @@ -980,7 +989,7 @@ async def _get_service_key_version_from_docker_service( ].strip("/") service_re_match = _SERVICE_KEY_REGEX.match(service_full_name) if not service_re_match: - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg=f"Invalid service '{service_full_name}', it does not follow pattern '{_SERVICE_KEY_REGEX.pattern}'" ) service_key = service_re_match.group("key") @@ -1125,14 +1134,9 @@ async def get_services_details( await _get_node_details(app, client, dict(service)) for service in list_running_services ] - except aiodocker.exceptions.DockerError as err: - log.exception( - "Error while listing services with user_id, study_id %s, %s", - user_id, - study_id, - ) - msg = "Error while accessing container" - raise exceptions.GenericDockerError(msg, err) from err + except aiodocker.DockerError as err: + msg = f"Error while accessing container for {user_id=}, {study_id=}" + raise GenericDockerError(err=msg) from err async def 
get_service_details(app: FastAPI, node_uuid: str) -> dict: @@ -1150,21 +1154,20 @@ async def get_service_details(app: FastAPI, node_uuid: str) -> dict: ) # error if no service with such an id exists if not list_running_services_with_uuid: - raise exceptions.ServiceUUIDNotFoundError(node_uuid) + raise ServiceUUIDNotFoundError(service_uuid=node_uuid) if len(list_running_services_with_uuid) > 1: # someone did something fishy here - raise exceptions.DirectorException( + raise DirectorRuntimeError( msg="More than one docker service is labeled as main service" ) return await _get_node_details( app, client, dict(list_running_services_with_uuid[0]) ) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while accessing container with uuid: %s", node_uuid) - msg = "Error while accessing container" - raise exceptions.GenericDockerError(msg, err) from err + except aiodocker.DockerError as err: + msg = f"Error while accessing container {node_uuid=}" + raise GenericDockerError(err=msg) from err @retry( @@ -1232,14 +1235,13 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non ] } ) - except aiodocker.exceptions.DockerError as err: - log.exception("Error while stopping container with uuid: %s", node_uuid) - msg = "Error while stopping container" - raise exceptions.GenericDockerError(msg, err) from err + except aiodocker.DockerError as err: + msg = f"Error while stopping container {node_uuid=}" + raise GenericDockerError(err=msg) from err # error if no service with such an id exists if not list_running_services_with_uuid: - raise exceptions.ServiceUUIDNotFoundError(node_uuid) + raise ServiceUUIDNotFoundError(service_uuid=node_uuid) log.debug("found service(s) with uuid %s", list_running_services_with_uuid) @@ -1268,7 +1270,7 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non ) except ClientResponseError as err: raise ServiceStateSaveError( - node_uuid, + service_uuid=node_uuid, reason=f"service 
{service_host_name} rejected to save state, " f"responded {err.message} (status {err.status})." "Aborting stop service to prevent data loss.", @@ -1289,9 +1291,9 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non log.debug("removing %s", service["Spec"]["Name"]) await client.services.delete(service["Spec"]["Name"]) - except aiodocker.exceptions.DockerError as err: - msg = "Error while removing services" - raise exceptions.GenericDockerError(msg, err) from err + except aiodocker.DockerError as err: + msg = f"Error while removing services {node_uuid=}" + raise GenericDockerError(err=msg) from err # remove network(s) log.debug("removed services, now removing network...") diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index c622c0b6cb3..c6b5c676f7c 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -18,13 +18,17 @@ from tenacity.wait import wait_fixed from yarl import URL -from . 
import exceptions from .client_session import get_client_session from .constants import ( DIRECTOR_SIMCORE_SERVICES_PREFIX, ORG_LABELS_TO_SCHEMA_LABELS, SERVICE_RUNTIME_SETTINGS, ) +from .core.errors import ( + DirectorRuntimeError, + RegistryConnectionError, + ServiceNotAvailableError, +) from .core.settings import ApplicationSettings, get_application_settings DEPENDENCIES_LABEL_KEY: str = "simcore.service.dependencies" @@ -51,7 +55,7 @@ async def _basic_auth_registry_request( app_settings = get_application_settings(app) if not app_settings.DIRECTOR_REGISTRY.REGISTRY_URL: msg = "URL to registry is not defined" - raise exceptions.DirectorException(msg) + raise DirectorRuntimeError(msg=msg) url = URL( f"{'https' if app_settings.DIRECTOR_REGISTRY.REGISTRY_SSL else 'http'}://{app_settings.DIRECTOR_REGISTRY.REGISTRY_URL}{path}" @@ -89,13 +93,13 @@ async def _basic_auth_registry_request( ) elif response.status == HTTPStatus.NOT_FOUND: - raise exceptions.ServiceNotAvailableError(str(path)) + raise ServiceNotAvailableError(service_name=path) elif response.status > 399: logger.exception( "Unknown error while accessing registry: %s", str(response) ) - raise exceptions.RegistryConnectionError(str(response)) + raise RegistryConnectionError(msg=str(response)) else: # registry that does not need an auth @@ -106,7 +110,7 @@ async def _basic_auth_registry_request( except client_exceptions.ClientError as exc: logger.exception("Unknown error while accessing registry") msg = f"Unknown error while accessing registry: {exc!s}" - raise exceptions.DirectorException(msg) from exc + raise DirectorRuntimeError(msg=msg) from exc async def _auth_registry_request( @@ -123,7 +127,7 @@ async def _auth_registry_request( or not app_settings.DIRECTOR_REGISTRY.REGISTRY_PW ): msg = "Wrong configuration: Authentication to registry is needed!" 
- raise exceptions.RegistryConnectionError(msg) + raise RegistryConnectionError(msg=msg) # auth issue let's try some authentication get the auth type auth_type = None auth_details: dict[str, str] = {} @@ -137,7 +141,7 @@ async def _auth_registry_request( break if not auth_type: msg = "Unknown registry type: cannot deduce authentication method!" - raise exceptions.RegistryConnectionError(msg) + raise RegistryConnectionError(msg=msg) auth = BasicAuth( login=app_settings.DIRECTOR_REGISTRY.REGISTRY_USER, password=app_settings.DIRECTOR_REGISTRY.REGISTRY_PW.get_secret_value(), @@ -152,7 +156,7 @@ async def _auth_registry_request( async with session.get(token_url, auth=auth, **kwargs) as token_resp: if token_resp.status != HTTPStatus.OK: msg = f"Unknown error while authentifying with registry: {token_resp!s}" - raise exceptions.RegistryConnectionError(msg) + raise RegistryConnectionError(msg=msg) bearer_code = (await token_resp.json())["token"] headers = {"Authorization": f"Bearer {bearer_code}"} async with getattr(session, method.lower())( @@ -160,13 +164,13 @@ async def _auth_registry_request( ) as resp_wtoken: if resp_wtoken.status == HTTPStatus.NOT_FOUND: logger.exception("path to registry not found: %s", url) - raise exceptions.ServiceNotAvailableError(str(url)) + raise ServiceNotAvailableError(service_name=f"{url}") if resp_wtoken.status > 399: logger.exception( "Unknown error while accessing with token authorized registry: %s", str(resp_wtoken), ) - raise exceptions.RegistryConnectionError(str(resp_wtoken)) + raise RegistryConnectionError(msg=f"{resp_wtoken}") resp_data = await resp_wtoken.json(content_type=None) resp_headers = resp_wtoken.headers return (resp_data, resp_headers) @@ -177,18 +181,18 @@ async def _auth_registry_request( ) as resp_wbasic: if resp_wbasic.status == HTTPStatus.NOT_FOUND: logger.exception("path to registry not found: %s", url) - raise exceptions.ServiceNotAvailableError(str(url)) + raise ServiceNotAvailableError(service_name=f"{url}") if 
resp_wbasic.status > 399: logger.exception( "Unknown error while accessing with token authorized registry: %s", str(resp_wbasic), ) - raise exceptions.RegistryConnectionError(str(resp_wbasic)) + raise RegistryConnectionError(msg=f"{resp_wbasic}") resp_data = await resp_wbasic.json(content_type=None) resp_headers = resp_wbasic.headers return (resp_data, resp_headers) msg = f"Unknown registry authentification type: {url}" - raise exceptions.RegistryConnectionError(msg) + raise RegistryConnectionError(msg=msg) async def registry_request( @@ -229,7 +233,7 @@ async def _is_registry_responsive(app: FastAPI) -> bool: app, path, no_cache=True, timeout=ClientTimeout(total=1.0) ) return True - except (TimeoutError, exceptions.DirectorException) as exc: + except (TimeoutError, DirectorRuntimeError) as exc: logger.debug("Registry not responsive: %s", exc) return False diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 96ad487fcfa..673d52605eb 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -18,11 +18,16 @@ from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.docker_registry import RegistrySettings -from simcore_service_director import exceptions, producer +from simcore_service_director import producer from simcore_service_director.constants import ( CPU_RESOURCE_LIMIT_KEY, MEM_RESOURCE_LIMIT_KEY, ) +from simcore_service_director.core.errors import ( + DirectorRuntimeError, + ServiceNotAvailableError, + ServiceUUIDNotFoundError, +) from simcore_service_director.core.settings import ApplicationSettings from tenacity import Retrying from tenacity.stop import stop_after_delay @@ -73,7 +78,7 @@ async def push_start_services( service_entry_point = pushed_service["entry_point"] service_uuid = str(uuid.uuid1()) service_basepath = "/my/base/path" - with 
pytest.raises(exceptions.ServiceUUIDNotFoundError): + with pytest.raises(ServiceUUIDNotFoundError): await producer.get_service_details(app, service_uuid) # start the service started_service = await producer.start_service( @@ -143,7 +148,7 @@ async def push_start_services( # even emulate a legacy dy-service that does not implement a save-state feature # so here we must make save_state=False await producer.stop_service(app, node_uuid=service_uuid, save_state=False) - with pytest.raises(exceptions.ServiceUUIDNotFoundError): + with pytest.raises(ServiceUUIDNotFoundError): await producer.get_service_details(app, service_uuid) @@ -161,11 +166,11 @@ async def test_find_service_tag(): "1.2.3", ] } - with pytest.raises(exceptions.ServiceNotAvailableError): + with pytest.raises(ServiceNotAvailableError): await producer._find_service_tag( # noqa: SLF001 list_of_images, "some_wrong_key", None ) - with pytest.raises(exceptions.ServiceNotAvailableError): + with pytest.raises(ServiceNotAvailableError): await producer._find_service_tag( # noqa: SLF001 list_of_images, my_service_key, "some wrong key" ) @@ -390,7 +395,7 @@ async def test_get_service_key_version_from_docker_service_except_invalid_keys( } } } - with pytest.raises(exceptions.DirectorException): + with pytest.raises(DirectorRuntimeError): await producer._get_service_key_version_from_docker_service( # noqa: SLF001 docker_service_partial_inspect, registry_settings ) From 444c4c99c573b4c59d3980b0dc7a520b00dd324c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:52:03 +0100 Subject: [PATCH 175/201] fixed test using monkeypatch --- .../registry_proxy.py | 2 +- .../tests/unit/test_registry_proxy.py | 21 ++++++++++++++++--- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index c6b5c676f7c..528c1913abd 100644 --- 
a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -208,7 +208,7 @@ async def registry_request( cache: SimpleMemoryCache = app.state.registry_cache_memory cache_key = f"{method}_{path}" if not no_cache and (cached_response := await cache.get(cache_key)): - assert isinstance(tuple[dict, Mapping], cached_response) # nosec + assert isinstance(cached_response, tuple) # nosec return cast(tuple[dict, Mapping], cached_response) app_settings = get_application_settings(app) diff --git a/services/director/tests/unit/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py index 31138fac03d..2e5738c2670 100644 --- a/services/director/tests/unit/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -6,8 +6,10 @@ import pytest from fastapi import FastAPI +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director import registry_proxy +from simcore_service_director.core.settings import ApplicationSettings async def test_list_no_services_available( @@ -201,16 +203,27 @@ async def test_get_image_details( assert details == service_description +@pytest.fixture +def configure_registry_caching( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, {"DIRECTOR_REGISTRY_CACHING": True} + ) + + async def test_registry_caching( configure_registry_access: EnvVarsDict, + configure_registry_caching: EnvVarsDict, + app_settings: ApplicationSettings, app: FastAPI, push_services, ): images = await push_services( - number_of_computational_services=1, number_of_interactive_services=1 + number_of_computational_services=21, number_of_interactive_services=21 ) - # TODO: use monkeypatching - # config.DIRECTOR_REGISTRY_CACHING = True + assert 
app_settings.DIRECTOR_REGISTRY_CACHING is True + start_time = time.perf_counter() services = await registry_proxy.list_services(app, registry_proxy.ServiceType.ALL) time_to_retrieve_without_cache = time.perf_counter() - start_time @@ -220,6 +233,8 @@ async def test_registry_caching( time_to_retrieve_with_cache = time.perf_counter() - start_time assert len(services) == len(images) assert time_to_retrieve_with_cache < time_to_retrieve_without_cache + print("time to retrieve services without cache: ", time_to_retrieve_without_cache) + print("time to retrieve services with cache: ", time_to_retrieve_with_cache) @pytest.mark.skip(reason="test needs credentials to real registry") From ea84b6b7374c3f3f490a9ef4a638b9f443935531 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 20:06:45 +0100 Subject: [PATCH 176/201] linter --- packages/pytest-simcore/src/pytest_simcore/docker.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker.py b/packages/pytest-simcore/src/pytest_simcore/docker.py index 9a5d5f26bdf..89e88484a4b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker.py @@ -1,3 +1,9 @@ +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=unused-argument +# pylint:disable=unused-variable + import asyncio import contextlib from collections.abc import AsyncIterator, Callable From 0500c7e5af6a0f1cbf6c1be40a2fc273d5325ea1 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 20:22:52 +0100 Subject: [PATCH 177/201] missing dependency --- .../requirements/_base.txt | 27 +++++++++++++++++ .../requirements/_test.in | 1 + .../requirements/_test.txt | 29 +++++++++++++++++++ .../requirements/_tools.txt | 1 + 4 files changed, 58 insertions(+) diff --git 
a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index abc242615c5..bd5578eb078 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -16,6 +16,10 @@ attrs==24.2.0 # referencing click==8.1.7 # via typer +deprecated==1.2.14 + # via + # opentelemetry-api + # opentelemetry-semantic-conventions dnspython==2.6.1 # via email-validator email-validator==2.2.0 @@ -26,6 +30,8 @@ idna==3.10 # via # email-validator # yarl +importlib-metadata==8.5.0 + # via opentelemetry-api jinja2==3.1.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -54,6 +60,19 @@ mdurl==0.1.2 # via markdown-it-py multidict==6.1.0 # via yarl +opentelemetry-api==1.28.0 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg + # opentelemetry-semantic-conventions +opentelemetry-instrumentation==0.49b0 + # via opentelemetry-instrumentation-asyncpg +opentelemetry-instrumentation-asyncpg==0.49b0 + # via -r requirements/../../../packages/postgres-database/requirements/_base.in +opentelemetry-semantic-conventions==0.49b0 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg orjson==3.10.7 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -61,6 +80,8 @@ orjson==3.10.7 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in +packaging==24.1 + # via opentelemetry-instrumentation psycopg2-binary==2.9.9 # via sqlalchemy pydantic==1.10.18 @@ -109,5 +130,11 @@ typing-extensions==4.12.2 # alembic # pydantic # typer +wrapt==1.16.0 + # via + # deprecated + # opentelemetry-instrumentation yarl==1.12.1 # via -r 
requirements/../../../packages/postgres-database/requirements/_base.in +zipp==3.20.2 + # via importlib-metadata diff --git a/packages/notifications-library/requirements/_test.in b/packages/notifications-library/requirements/_test.in index eafb9291680..005795b87e7 100644 --- a/packages/notifications-library/requirements/_test.in +++ b/packages/notifications-library/requirements/_test.in @@ -8,6 +8,7 @@ # --constraint _base.txt +aiodocker coverage docker faker diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 55a7d9b8ee8..a37d4549331 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -1,3 +1,17 @@ +aiodocker==0.23.0 + # via -r requirements/_test.in +aiohappyeyeballs==2.4.3 + # via aiohttp +aiohttp==3.10.10 + # via + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiosignal==1.3.1 + # via aiohttp +attrs==24.2.0 + # via + # -c requirements/_base.txt + # aiohttp certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt @@ -12,6 +26,10 @@ docker==7.1.0 # via -r requirements/_test.in faker==29.0.0 # via -r requirements/_test.in +frozenlist==1.5.0 + # via + # aiohttp + # aiosignal greenlet==3.1.1 # via # -c requirements/_base.txt @@ -22,14 +40,21 @@ idna==3.10 # via # -c requirements/_base.txt # requests + # yarl iniconfig==2.0.0 # via pytest +multidict==6.1.0 + # via + # -c requirements/_base.txt + # aiohttp + # yarl mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy packaging==24.1 # via + # -c requirements/_base.txt # pytest # pytest-sugar pluggy==1.5.0 @@ -100,3 +125,7 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # docker # requests +yarl==1.12.1 + # via + # -c requirements/_base.txt + # aiohttp diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 
217752d687f..e9ee7a7d400 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -40,6 +40,7 @@ nodeenv==1.9.1 # via pre-commit packaging==24.1 # via + # -c requirements/_base.txt # -c requirements/_test.txt # black # build From 112824580ed994a5fd64e69fea020738919b9df1 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 20:25:11 +0100 Subject: [PATCH 178/201] missing faker --- tests/swarm-deploy/requirements/_test.in | 1 + tests/swarm-deploy/requirements/_test.txt | 34 +++++++++++++++++++++-- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/tests/swarm-deploy/requirements/_test.in b/tests/swarm-deploy/requirements/_test.in index 9b1e1ece9fe..e325cd6a11f 100644 --- a/tests/swarm-deploy/requirements/_test.in +++ b/tests/swarm-deploy/requirements/_test.in @@ -11,6 +11,7 @@ aiodocker alembic click docker +faker jsonschema pytest pytest-asyncio diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 2f4dc983011..6fa1a4ebab6 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -117,6 +117,8 @@ docker==7.1.0 # -r requirements/_test.in email-validator==2.2.0 # via pydantic +faker==30.8.2 + # via -r requirements/_test.in fast-depends==2.4.12 # via faststream faststream==0.5.28 @@ -197,6 +199,10 @@ opentelemetry-api==1.27.0 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions @@ -213,7 +219,22 @@ opentelemetry-exporter-otlp-proto-grpc==1.27.0 opentelemetry-exporter-otlp-proto-http==1.27.0 # via 
opentelemetry-exporter-otlp opentelemetry-instrumentation==0.48b0 - # via opentelemetry-instrumentation-requests + # via + # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aiopg==0.48b0 + # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.48b0 + # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +opentelemetry-instrumentation-dbapi==0.48b0 + # via opentelemetry-instrumentation-aiopg +opentelemetry-instrumentation-redis==0.48b0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.48b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -231,6 +252,9 @@ opentelemetry-sdk==1.27.0 # opentelemetry-exporter-otlp-proto-http opentelemetry-semantic-conventions==0.48b0 # via + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk opentelemetry-util-http==0.48b0 @@ -343,7 +367,9 @@ pytest-runner==6.0.1 pytest-sugar==1.0.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 - # via arrow + # via + # arrow + # faker python-dotenv==1.0.1 # via -r requirements/_test.in pyyaml==6.0.2 @@ -465,6 +491,7 @@ typing-extensions==4.12.2 # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # aiodebug # alembic + # faker # faststream # flexcache # flexparser @@ -494,6 +521,9 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiopg + # 
opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis yarl==1.12.1 # via # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in From 7b85e9bb9d69eb123d424db0d23f4cedb53e86b0 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 7 Nov 2024 22:17:00 +0100 Subject: [PATCH 179/201] correct status code --- .../src/simcore_service_catalog/api/rest/_services_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py index 701c4b41f3d..9bbca5b902f 100644 --- a/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py +++ b/services/catalog/src/simcore_service_catalog/api/rest/_services_resources.py @@ -148,7 +148,7 @@ async def _get_service_labels( # `busybox:latest` or `traefik:latest` because # the director-v0 cannot extract labels from them # and will fail validating the key or the version - if err.status_code == status.HTTP_400_BAD_REQUEST: + if err.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY: return None raise From b318dfe57c5b67e88b0c115824c8dac0cfd18ef6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 8 Nov 2024 08:12:10 +0100 Subject: [PATCH 180/201] @GitHK review: _logger --- .../api/rest/_running_interactive_services.py | 10 +- .../api/rest/_service_extras.py | 4 +- .../api/rest/_services.py | 8 +- .../src/simcore_service_director/producer.py | 126 +++++++++--------- .../registry_proxy.py | 4 +- 5 files changed, 77 insertions(+), 75 deletions(-) diff --git a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py index e79d4f70f8f..61457413688 100644 --- 
a/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py +++ b/services/director/src/simcore_service_director/api/rest/_running_interactive_services.py @@ -20,7 +20,7 @@ router = APIRouter() -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @router.get("/running_interactive_services") @@ -29,7 +29,7 @@ async def list_running_services( user_id: UserID | None = None, project_id: ProjectID | None = None, ) -> Envelope[list[dict[str, Any]]]: - log.debug( + _logger.debug( "Client does list_running_services request user_id %s, project_id %s", user_id, project_id, @@ -56,7 +56,7 @@ async def start_service( service_tag: ServiceVersion | None = None, x_simcore_user_agent: str = Header(...), ) -> Envelope[dict[str, Any]]: - log.debug( + _logger.debug( "Client does start_service with user_id %s, project_id %s, service %s:%s, service_uuid %s, service_basepath %s, request_simcore_user_agent %s", user_id, project_id, @@ -97,7 +97,7 @@ async def get_running_service( the_app: Annotated[FastAPI, Depends(get_app)], service_uuid: UUID, ) -> Envelope[dict[str, Any]]: - log.debug( + _logger.debug( "Client does get_running_service with service_uuid %s", service_uuid, ) @@ -119,7 +119,7 @@ async def stop_service( service_uuid: UUID, save_state: bool = True, ) -> None: - log.debug( + _logger.debug( "Client does stop_service with service_uuid %s", service_uuid, ) diff --git a/services/director/src/simcore_service_director/api/rest/_service_extras.py b/services/director/src/simcore_service_director/api/rest/_service_extras.py index 5d1d069bb09..ab61e8ac1ad 100644 --- a/services/director/src/simcore_service_director/api/rest/_service_extras.py +++ b/services/director/src/simcore_service_director/api/rest/_service_extras.py @@ -11,7 +11,7 @@ router = APIRouter() -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @router.get("/service_extras/{service_key:path}/{service_version}") @@ -20,7 +20,7 @@ async def 
list_service_extras( service_key: ServiceKey, service_version: ServiceVersion, ) -> Envelope[dict[str, Any]]: - log.debug( + _logger.debug( "Client does service_extras_by_key_version_get request with service_key %s, service_version %s", service_key, service_version, diff --git a/services/director/src/simcore_service_director/api/rest/_services.py b/services/director/src/simcore_service_director/api/rest/_services.py index d9b2799a5dd..157f5305d1b 100644 --- a/services/director/src/simcore_service_director/api/rest/_services.py +++ b/services/director/src/simcore_service_director/api/rest/_services.py @@ -13,7 +13,7 @@ router = APIRouter() -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) class _ErrorMessage(BaseModel): @@ -38,7 +38,7 @@ async def list_services( the_app: Annotated[FastAPI, Depends(get_app)], service_type: ServiceType | None = None, ) -> Envelope[list[dict[str, Any]]]: - log.debug( + _logger.debug( "Client does list_services request with service_type %s", service_type, ) @@ -73,7 +73,7 @@ async def list_service_labels( service_key: ServiceKey, service_version: ServiceVersion, ) -> Envelope[dict[str, Any]]: - log.debug( + _logger.debug( "Retrieving service labels with service_key %s, service_version %s", service_key, service_version, @@ -101,7 +101,7 @@ async def get_service( service_key: ServiceKey, service_version: ServiceVersion, ) -> Envelope[list[dict[str, Any]]]: - log.debug( + _logger.debug( "Client does get_service with service_key %s, service_version %s", service_key, service_version, diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 2fbbf371054..431f13aa3c7 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -55,7 +55,7 @@ from .services_common import ServicesCommonSettings from .system_utils import get_system_extra_hosts_raw -log = 
logging.getLogger(__name__) +_logger = logging.getLogger(__name__) class ServiceState(Enum): @@ -77,7 +77,7 @@ async def _create_auth(registry_settings: RegistrySettings) -> dict[str, str]: async def _check_node_uuid_available( client: aiodocker.docker.Docker, node_uuid: str ) -> None: - log.debug("Checked if UUID %s is already in use", node_uuid) + _logger.debug("Checked if UUID %s is already in use", node_uuid) # check if service with same uuid already exists try: # not filtering by "swarm_stack_name" label because it's safer @@ -91,7 +91,7 @@ async def _check_node_uuid_available( raise GenericDockerError(err=msg) from err if list_of_running_services_w_uuid: raise ServiceUUIDInUseError(service_uuid=node_uuid) - log.debug("UUID %s is free", node_uuid) + _logger.debug("UUID %s is free", node_uuid) def _check_setting_correctness(setting: dict) -> None: @@ -103,7 +103,7 @@ def _check_setting_correctness(setting: dict) -> None: def _parse_mount_settings(settings: list[dict]) -> list[dict]: mounts = [] for s in settings: - log.debug("Retrieved mount settings %s", s) + _logger.debug("Retrieved mount settings %s", s) mount = {} mount["ReadOnly"] = True if "ReadOnly" in s and s["ReadOnly"] in ["false", "False", False]: @@ -113,12 +113,12 @@ def _parse_mount_settings(settings: list[dict]) -> list[dict]: if field in s: mount[field] = s[field] else: - log.warning( + _logger.warning( "Mount settings have wrong format. 
Required keys [Source, Target, Type]" ) continue - log.debug("Append mount settings %s", mount) + _logger.debug("Append mount settings %s", mount) mounts.append(mount) return mounts @@ -130,13 +130,13 @@ def _parse_mount_settings(settings: list[dict]) -> list[dict]: def _parse_env_settings(settings: list[str]) -> dict: envs = {} for s in settings: - log.debug("Retrieved env settings %s", s) + _logger.debug("Retrieved env settings %s", s) if "=" in s: parts = s.split("=") if len(parts) == _ENV_NUM_ELEMENTS: envs.update({parts[0]: parts[1]}) - log.debug("Parsed env settings %s", s) + _logger.debug("Parsed env settings %s", s) return envs @@ -151,7 +151,7 @@ async def _read_service_settings( else None ) - log.debug("Retrieved %s settings: %s", settings_name, pformat(settings)) + _logger.debug("Retrieved %s settings: %s", settings_name, pformat(settings)) return settings @@ -187,7 +187,7 @@ async def _create_docker_service_params( app, service_key, service_tag, SERVICE_REVERSE_PROXY_SETTINGS ) service_name = registry_proxy.get_service_last_names(service_key) + "_" + node_uuid - log.debug("Converting labels to docker runtime parameters") + _logger.debug("Converting labels to docker runtime parameters") service_default_envs = { "POSTGRES_ENDPOINT": app_settings.DIRECTOR_POSTGRES.dsn, "POSTGRES_USER": app_settings.DIRECTOR_POSTGRES.POSTGRES_USER, @@ -317,7 +317,7 @@ async def _create_docker_service_params( "networks": [internal_network_id] if internal_network_id else [], } if app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: - log.debug( + _logger.debug( "adding custom constraints %s ", app_settings.DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS, ) @@ -439,14 +439,14 @@ async def _create_docker_service_params( ): # python-API compatible docker_params["task_template"]["Placement"]["Constraints"] += param["value"] elif param["name"] == "env": - log.debug("Found env parameter %s", param["value"]) + _logger.debug("Found env parameter %s", param["value"]) env_settings = 
_parse_env_settings(param["value"]) if env_settings: docker_params["task_template"]["ContainerSpec"]["Env"].update( env_settings ) elif param["name"] == "mount": - log.debug("Found mount parameter %s", param["value"]) + _logger.debug("Found mount parameter %s", param["value"]) mount_settings: list[dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( @@ -491,7 +491,7 @@ async def _create_docker_service_params( } docker_params["task_template"]["ContainerSpec"]["Env"].update(resource_limits) - log.debug( + _logger.debug( "Converted labels to docker runtime parameters: %s", pformat(docker_params) ) return docker_params @@ -500,11 +500,11 @@ async def _create_docker_service_params( def _get_service_entrypoint( service_boot_parameters_labels: list[dict[str, Any]] ) -> str: - log.debug("Getting service entrypoint") + _logger.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) if param["name"] == "entry_point": - log.debug("Service entrypoint is %s", param["value"]) + _logger.debug("Service entrypoint is %s", param["value"]) assert isinstance(param["value"], str) # nosec return param["value"] return "" @@ -536,7 +536,7 @@ async def _get_swarm_network( async def _get_docker_image_port_mapping( service: dict, ) -> tuple[str | None, int | None]: - log.debug("getting port published by service: %s", service["Spec"]["Name"]) + _logger.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] target_ports = [] @@ -548,7 +548,7 @@ async def _get_docker_image_port_mapping( published_ports.append(port["PublishedPort"]) target_ports.append(port["TargetPort"]) - log.debug("Service %s publishes: %s ports", service["ID"], published_ports) + _logger.debug("Service %s publishes: %s ports", service["ID"], published_ports) published_port = None target_port = None if published_ports: @@ -578,7 +578,7 @@ async def 
_pass_port_to_service( _check_setting_correctness(param) if param["name"] == "published_host": route = param["value"] - log.debug( + _logger.debug( "Service needs to get published host %s:%s using route %s", app_settings.DIRECTOR_PUBLISHED_HOST_NAME, port, @@ -589,11 +589,11 @@ async def _pass_port_to_service( "hostname": str(app_settings.DIRECTOR_PUBLISHED_HOST_NAME), "port": str(port), } - log.debug("creating request %s and query %s", service_url, query_string) + _logger.debug("creating request %s and query %s", service_url, query_string) async with session.post(service_url, data=query_string) as response: - log.debug("query response: %s", await response.text()) + _logger.debug("query response: %s", await response.text()) return - log.debug("service %s does not need to know its external port", service_name) + _logger.debug("service %s does not need to know its external port", service_name) async def _create_network_name(service_name: str, node_uuid: str) -> str: @@ -603,7 +603,7 @@ async def _create_network_name(service_name: str, node_uuid: str) -> str: async def _create_overlay_network_in_swarm( client: aiodocker.docker.Docker, service_name: str, node_uuid: str ) -> str: - log.debug( + _logger.debug( "Creating overlay network for service %s with uuid %s", service_name, node_uuid ) network_name = await _create_network_name(service_name, node_uuid) @@ -614,7 +614,7 @@ async def _create_overlay_network_in_swarm( "Labels": {_to_simcore_runtime_docker_label_key("node_id"): node_uuid}, } docker_network = await client.networks.create(network_config) - log.debug( + _logger.debug( "Network %s created for service %s with uuid %s", network_name, service_name, @@ -629,7 +629,7 @@ async def _create_overlay_network_in_swarm( async def _remove_overlay_network_of_swarm( client: aiodocker.docker.Docker, node_uuid: str ) -> None: - log.debug("Removing overlay network for service with uuid %s", node_uuid) + _logger.debug("Removing overlay network for service with uuid %s", 
node_uuid) try: networks = await client.networks.list() networks = [ @@ -640,12 +640,12 @@ async def _remove_overlay_network_of_swarm( and x["Labels"][_to_simcore_runtime_docker_label_key("node_id")] == node_uuid ] - log.debug("Found %s networks with uuid %s", len(networks), node_uuid) + _logger.debug("Found %s networks with uuid %s", len(networks), node_uuid) # remove any network in the list (should be only one) for network in networks: docker_network = aiodocker.networks.DockerNetwork(client, network["Id"]) await docker_network.delete() - log.debug("Removed %s networks with uuid %s", len(networks), node_uuid) + _logger.debug("Removed %s networks with uuid %s", len(networks), node_uuid) except aiodocker.DockerError as err: msg = "Error while removing networks" raise GenericDockerError(err=msg) from err @@ -656,7 +656,7 @@ async def _get_service_state( ) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] - log.debug("Getting service %s state", service_name) + _logger.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) # wait for tasks @@ -676,17 +676,17 @@ async def _get_service_state( last_task = sorted(tasks, key=lambda task: task["UpdatedAt"])[-1] task_state = last_task["Status"]["State"] - log.debug("%s %s", service["ID"], task_state) + _logger.debug("%s %s", service["ID"], task_state) last_task_state = ServiceState.STARTING # default last_task_error_msg = last_task["Status"].get("Err", "") if task_state in ("failed"): # check if it failed already the max number of attempts we allow for if len(tasks) < app_settings.DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: - log.debug("number of tasks: %s", len(tasks)) + _logger.debug("number of tasks: %s", len(tasks)) last_task_state = ServiceState.STARTING else: - log.error( + _logger.error( "service %s failed with %s after %s trials", service_name, last_task["Status"], @@ -694,7 
+694,7 @@ async def _get_service_state( ) last_task_state = ServiceState.FAILED elif task_state in ("rejected"): - log.error("service %s failed with %s", service_name, last_task["Status"]) + _logger.error("service %s failed with %s", service_name, last_task["Status"]) last_task_state = ServiceState.FAILED elif task_state in ("pending"): last_task_state = ServiceState.PENDING @@ -708,7 +708,9 @@ async def _get_service_state( task_state_update_time = to_datetime(last_task["Status"]["Timestamp"]) time_since_running = now - task_state_update_time - log.debug("Now is %s, time since running mode is %s", now, time_since_running) + _logger.debug( + "Now is %s, time since running mode is %s", now, time_since_running + ) if time_since_running > timedelta( seconds=app_settings.DIRECTOR_SERVICES_STATE_MONITOR_S ): @@ -718,7 +720,7 @@ async def _get_service_state( elif task_state in ("complete", "shutdown"): last_task_state = ServiceState.COMPLETE - log.debug("service running state is %s", last_task_state) + _logger.debug("service running state is %s", last_task_state) return (last_task_state, last_task_error_msg) @@ -727,7 +729,7 @@ async def _wait_until_service_running_or_failed( ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] - log.debug("Waiting for service %s to start", service_name) + _logger.debug("Waiting for service %s to start", service_name) while True: tasks = await client.tasks.list(filters={"service": service_name}) # only keep the ones with the right service ID (we're being a bit picky maybe) @@ -736,9 +738,9 @@ async def _wait_until_service_running_or_failed( if tasks: last_task = tasks[0] task_state = last_task["Status"]["State"] - log.debug("%s %s", service["ID"], task_state) + _logger.debug("%s %s", service["ID"], task_state) if task_state in ("failed", "rejected"): - log.error( + _logger.error( "Error while waiting for service with %s", last_task["Status"] ) raise ServiceStartTimeoutError( @@ 
-748,7 +750,7 @@ async def _wait_until_service_running_or_failed( break # allows dealing with other events instead of wasting time here await asyncio.sleep(1) # 1s - log.debug("Waited for service %s to start", service_name) + _logger.debug("Waited for service %s to start", service_name) async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[str]]: @@ -756,11 +758,11 @@ async def _get_repos_from_key(app: FastAPI, service_key: str) -> dict[str, list[ list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) } - log.debug("entries %s", list_of_images) + _logger.debug("entries %s", list_of_images) if not list_of_images[service_key]: raise ServiceNotAvailableError(service_name=service_key) - log.debug( + _logger.debug( "Service %s has the following list of images available: %s", service_key, list_of_images, @@ -816,7 +818,7 @@ async def _find_service_tag( service_name=service_key, service_tag=service_tag ) - log.debug("Service tag found is %s ", service_tag) + _logger.debug("Service tag found is %s ", service_tag) assert tag is not None # nosec return tag @@ -849,7 +851,7 @@ async def _start_docker_service( internal_network_id=internal_network_id, request_simcore_user_agent=request_simcore_user_agent, ) - log.debug( + _logger.debug( "Starting docker service %s:%s using parameters %s", service_key, service_tag, @@ -862,7 +864,7 @@ async def _start_docker_service( # error while starting service msg = f"Error while starting service: {service!s}" raise DirectorRuntimeError(msg=msg) - log.debug("Service started now waiting for it to run") + _logger.debug("Service started now waiting for it to run") # get the full info from docker service = await client.services.inspect(service["ID"]) @@ -872,7 +874,7 @@ async def _start_docker_service( ) # wait for service to start - log.debug("Service %s successfully started", service_name) + _logger.debug("Service %s successfully started", service_name) # the docker swarm maybe opened 
some random port to access the service, get the latest version of the service service = await client.services.inspect(service["ID"]) published_port, target_port = await _get_docker_image_port_mapping( @@ -911,11 +913,11 @@ async def _start_docker_service( } except ServiceStartTimeoutError: - log.exception("Service failed to start") + _logger.exception("Service failed to start") await _silent_service_cleanup(app, node_uuid) raise except aiodocker.DockerError as err: - log.exception("Unexpected error") + _logger.exception("Unexpected error") await _silent_service_cleanup(app, node_uuid) raise ServiceNotAvailableError( service_name=service_key, service_tag=service_tag @@ -937,14 +939,14 @@ async def _create_node( node_base_path: str, request_simcore_user_agent: str, ) -> list[dict]: # pylint: disable=R0913, R0915 - log.debug( + _logger.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), node_uuid, node_base_path, user_id, ) - log.debug("Services %s will be started", list_of_services) + _logger.debug("Services %s will be started", list_of_services) # if the service uses several docker images, a network needs to be setup to connect them together inter_docker_network_id = None @@ -953,7 +955,7 @@ async def _create_node( inter_docker_network_id = await _create_overlay_network_in_swarm( client, service_name, node_uuid ) - log.debug("Created docker network in swarm for service %s", service_name) + _logger.debug("Created docker network in swarm for service %s", service_name) containers_meta_data = [] for service in list_of_services: @@ -1014,7 +1016,7 @@ async def start_service( request_simcore_user_agent: str, ) -> dict: app_settings = get_application_settings(app) - log.debug( + _logger.debug( "starting service %s:%s using uuid %s, basepath %s", service_key, service_tag, @@ -1026,11 +1028,11 @@ async def start_service( await _check_node_uuid_available(client, node_uuid) list_of_images = await _get_repos_from_key(app, 
service_key) service_tag = await _find_service_tag(list_of_images, service_key, service_tag) - log.debug("Found service to start %s:%s", service_key, service_tag) + _logger.debug("Found service to start %s:%s", service_key, service_tag) list_of_services_to_start = [{"key": service_key, "tag": service_tag}] # find the service dependencies list_of_dependencies = await _get_dependant_repos(app, service_key, service_tag) - log.debug("Found service dependencies: %s", list_of_dependencies) + _logger.debug("Found service dependencies: %s", list_of_dependencies) if list_of_dependencies: list_of_services_to_start.extend(list_of_dependencies) @@ -1201,7 +1203,7 @@ async def _save_service_state( # METHOD NOT ALLOWED https://httpstatuses.com/405 # NOT FOUND https://httpstatuses.com/404 # - log.warning( + _logger.warning( "Service '%s' does not seem to implement save state functionality: %s. Skipping save", service_host_name, err, @@ -1210,7 +1212,7 @@ async def _save_service_state( # upss ... could service had troubles saving, reraise raise else: - log.info( + _logger.info( "Service '%s' successfully saved its state: %s", service_host_name, f"{response}", @@ -1220,7 +1222,7 @@ async def _save_service_state( @run_sequentially_in_context(target_args=["node_uuid"]) async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> None: app_settings = get_application_settings(app) - log.debug( + _logger.debug( "stopping service with node_uuid=%s, save_state=%s", node_uuid, save_state ) @@ -1243,7 +1245,7 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non if not list_running_services_with_uuid: raise ServiceUUIDNotFoundError(service_uuid=node_uuid) - log.debug("found service(s) with uuid %s", list_running_services_with_uuid) + _logger.debug("found service(s) with uuid %s", list_running_services_with_uuid) # save the state of the main service if it can service_details = await get_service_details(app, node_uuid) @@ -1263,7 +1265,7 @@ async 
def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non # If state save is enforced if save_state: - log.debug("saving state of service %s...", service_host_name) + _logger.debug("saving state of service %s...", service_host_name) try: await _save_service_state( service_host_name, session=get_client_session(app) @@ -1277,7 +1279,7 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non ) from err except ClientError as err: - log.warning( + _logger.warning( "Could not save state because %s is unreachable [%s]." "Resuming stop_service.", service_host_name, @@ -1286,9 +1288,9 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non # remove the services try: - log.debug("removing services ...") + _logger.debug("removing services ...") for service in list_running_services_with_uuid: - log.debug("removing %s", service["Spec"]["Name"]) + _logger.debug("removing %s", service["Spec"]["Name"]) await client.services.delete(service["Spec"]["Name"]) except aiodocker.DockerError as err: @@ -1296,9 +1298,9 @@ async def stop_service(app: FastAPI, *, node_uuid: str, save_state: bool) -> Non raise GenericDockerError(err=msg) from err # remove network(s) - log.debug("removed services, now removing network...") + _logger.debug("removed services, now removing network...") await _remove_overlay_network_of_swarm(client, node_uuid) - log.debug("removed network") + _logger.debug("removed network") if app_settings.DIRECTOR_MONITORING_ENABLED: get_instrumentation(app).services_stopped.labels( diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 528c1913abd..0994292ab99 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -378,7 +378,7 @@ async def get_repo_details(app: FastAPI, image_key: str) -> list[dict[str, Any]] results 
= await limited_gather( *[get_image_details(app, image_key, tag) for tag in image_tags], reraise=False, - log=logger, + _logger=logger, limit=_MAX_CONCURRENT_CALLS, ) return [result for result in results if not isinstance(result, BaseException)] @@ -400,7 +400,7 @@ async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: results = await limited_gather( *[get_repo_details(app, repo) for repo in repos], reraise=False, - log=logger, + _logger=logger, limit=_MAX_CONCURRENT_CALLS, ) From 6fe63f55cff10942c5039ded64ee0c5ae50b506c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 8 Nov 2024 08:17:28 +0100 Subject: [PATCH 181/201] @GitHK review: more stuff --- .../src/simcore_service_director/constants.py | 5 ---- .../core/application.py | 4 ++-- .../test_rest_running_interactive_services.py | 24 ------------------- 3 files changed, 2 insertions(+), 31 deletions(-) diff --git a/services/director/src/simcore_service_director/constants.py b/services/director/src/simcore_service_director/constants.py index 9ce6ea81a44..bb11b71cec9 100644 --- a/services/director/src/simcore_service_director/constants.py +++ b/services/director/src/simcore_service_director/constants.py @@ -20,9 +20,4 @@ DIRECTOR_SIMCORE_SERVICES_PREFIX: Final[str] = "simcore/services" - -# TO remove -# used when in devel mode vs release mode -NODE_SCHEMA_LOCATION: Final[str] = f"{API_ROOT}/v0/schemas/node-meta-v0.0.1.json" - DATETIME_FORMAT: Final[str] = "%Y-%m-%dT%H:%M:%S.%f" diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index ae8467f902d..10fb32b5518 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -21,7 +21,7 @@ _LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR _NOISY_LOGGERS: Final[tuple[str]] = ("werkzeug",) -logger = 
logging.getLogger(__name__) +_logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: @@ -32,7 +32,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: for name in _NOISY_LOGGERS: logging.getLogger(name).setLevel(quiet_level) - logger.info("app settings: %s", settings.json(indent=1)) + _logger.info("app settings: %s", settings.json(indent=1)) app = FastAPI( debug=settings.DIRECTOR_DEBUG, diff --git a/services/director/tests/unit/api/test_rest_running_interactive_services.py b/services/director/tests/unit/api/test_rest_running_interactive_services.py index 7f2820ee68d..97accd23279 100644 --- a/services/director/tests/unit/api/test_rest_running_interactive_services.py +++ b/services/director/tests/unit/api/test_rest_running_interactive_services.py @@ -25,30 +25,6 @@ def _assert_response_and_unwrap_envelope(got: httpx.Response): return body.get("data"), body.get("error") -@pytest.mark.skip( - reason="docker_swarm fixture is a session fixture making it bad running together with other tests that require a swarm" -) -async def test_running_services_post_and_delete_no_swarm( - configure_swarm_stack_name: EnvVarsDict, - client: httpx.AsyncClient, - push_services, - user_id: UserID, - project_id: ProjectID, - api_version_prefix: str, -): - params = { - "user_id": "None", - "project_id": "None", - "service_uuid": "sdlfkj4", - "service_key": "simcore/services/comp/some-key", - } - resp = await client.post( - f"/{api_version_prefix}/running_interactive_services", params=params - ) - data = resp.json() - assert resp.status_code == 500, data - - @pytest.mark.parametrize( "save_state, expected_save_state_call", [(True, True), (False, False), (None, True)] ) From e96cf496a279b47c57f525cd12ebd77eb804507f Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:02:31 +0100 Subject: [PATCH 182/201] env tsts --- .../director/tests/unit/test_core_settings.py | 71 
+++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/services/director/tests/unit/test_core_settings.py b/services/director/tests/unit/test_core_settings.py index beaa0cd3056..6cd0f49ff84 100644 --- a/services/director/tests/unit/test_core_settings.py +++ b/services/director/tests/unit/test_core_settings.py @@ -4,6 +4,8 @@ # pylint: disable=too-many-arguments +import pytest +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director.core.settings import ApplicationSettings @@ -25,3 +27,72 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): app_environment["DIRECTOR_DEFAULT_MAX_MEMORY"] == f"{settings.DIRECTOR_DEFAULT_MAX_MEMORY}" ) + + +@pytest.mark.skip(reason="under dev") +def test_mytests(monkeypatch: pytest.MonkeyPatch): + + setenvs_from_envfile( + monkeypatch, + """ + DEFAULT_MAX_MEMORY=0 + DEFAULT_MAX_NANO_CPUS=0 + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS={} + DIRECTOR_REGISTRY_CACHING=True + DIRECTOR_REGISTRY_CACHING_TTL=900 + DIRECTOR_SELF_SIGNED_SSL_FILENAME= + DIRECTOR_SELF_SIGNED_SSL_SECRET_ID= + DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME= + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=node.labels.io.simcore.autoscaled-node!=true + EXTRA_HOSTS_SUFFIX=undefined + GPG_KEY=0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + HOME=/root + HOSTNAME=osparc-master-01-2 + LANG=C.UTF-8 + LC_ALL=C.UTF-8 + LOGLEVEL=WARNING + MONITORING_ENABLED=True + PATH=/home/scu/.venv/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + POSTGRES_DB=simcoredb + POSTGRES_ENDPOINT=master_postgres:5432 + POSTGRES_HOST=master_postgres + POSTGRES_PASSWORD=z43 + POSTGRES_PORT=5432 + POSTGRES_USER=scu + PUBLISHED_HOST_NAME=osparc-master.speag.com + PWD=/home/scu + PYTHONDONTWRITEBYTECODE=1 + PYTHONOPTIMIZE=TRUE + PYTHON_GET_PIP_SHA256=b3153ec0cf7b7bbf9556932aa37e4981c35dc2a2c501d70d91d2795aa532be79 + 
PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/eff16c878c7fd6b688b9b4c4267695cf1a0bf01b/get-pip.py + PYTHON_PIP_VERSION=20.1.1 + PYTHON_VERSION=3.6.10 + REGISTRY_AUTH=True + REGISTRY_PATH= + REGISTRY_PW=adminadminadmin + REGISTRY_SSL=True + REGISTRY_URL=registry.osparc-master.speag.com + REGISTRY_USER=admin + REGISTRY_VERSION=v2 + S3_ACCESS_KEY=YE2F1H88P2Z51GYX7HCV + S3_BUCKET_NAME=master-simcore + S3_ENDPOINT=https://ceph-prod-rgw.speag.com + S3_REGION=us-east-1 + S3_SECRET_KEY=7CXBx2HTy6NrVPClatbB6bWZcM1zx782Y7mAaoPs + SC_BOOT_MODE=production + SC_BUILD_TARGET=production + SC_USER_ID=8004 + SC_USER_NAME=scu + SHLVL=0 + SIMCORE_SERVICES_NETWORK_NAME=master-simcore_interactive_services_subnet + STORAGE_ENDPOINT=master_storage:8080 + SWARM_STACK_NAME=master-simcore + TERM=xterm + TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT=http://jaeger:4318 + TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE=50 + TRAEFIK_SIMCORE_ZONE=master_internal_simcore_stack + VIRTUAL_ENV=/home/scu/.venv + """, + ) + + settings = ApplicationSettings.create_from_envs() From 621906c3cbcf33fb4d3c5e5fdd2c4e1b5f6d78cc Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:15:41 +0100 Subject: [PATCH 183/201] adds tracing --- services/director/tests/unit/conftest.py | 7 +++++-- services/docker-compose.yml | 14 +++++++------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index 9bdafb00527..75ba8e7fd5c 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -65,7 +65,10 @@ def configure_swarm_stack_name( app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch ) -> EnvVarsDict: return app_environment | setenvs_from_dict( - monkeypatch, envs={"SWARM_STACK_NAME": "test_stack"} + monkeypatch, + envs={ + "SWARM_STACK_NAME": "test_stack", + }, ) @@ -128,7 +131,7 @@ def 
app_environment( monkeypatch, { **docker_compose_service_environment_dict, - # ADD here env-var overrides + "DIRECTOR_TRACING": "null", }, ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 55b97fe872c..5e57dcd04d8 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -251,18 +251,19 @@ services: environment: DIRECTOR_DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY} DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} + DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DIRECTOR_LOGLEVEL: ${DIRECTOR_LOGLEVEL} + DIRECTOR_MONITORING_ENABLED: ${DIRECTOR_MONITORING_ENABLED} + DIRECTOR_PUBLISHED_HOST_NAME: ${DIRECTOR_PUBLISHED_HOST_NAME} DIRECTOR_REGISTRY_CACHING_TTL: ${DIRECTOR_REGISTRY_CACHING_TTL} DIRECTOR_REGISTRY_CACHING: ${DIRECTOR_REGISTRY_CACHING} DIRECTOR_SELF_SIGNED_SSL_FILENAME: ${DIRECTOR_SELF_SIGNED_SSL_FILENAME} DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_ID} DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME} DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} - DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} - DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} + DIRECTOR_TRACING: ${DIRECTOR_TRACING} EXTRA_HOSTS_SUFFIX: undefined - DIRECTOR_LOGLEVEL: ${DIRECTOR_LOGLEVEL} - DIRECTOR_MONITORING_ENABLED: ${DIRECTOR_MONITORING_ENABLED} - DIRECTOR_PUBLISHED_HOST_NAME: ${DIRECTOR_PUBLISHED_HOST_NAME} POSTGRES_DB: ${POSTGRES_DB} POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} POSTGRES_HOST: ${POSTGRES_HOST} @@ -283,9 +284,8 @@ services: SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} SWARM_STACK_NAME: ${SWARM_STACK_NAME:-simcore} - 
DIRECTOR_TRACING: ${DIRECTOR_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} volumes: - "/var/run/docker.sock:/var/run/docker.sock" From 8c69e3070ebc116b441461b8d5277a574c549214 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 14:28:58 +0100 Subject: [PATCH 184/201] replaced DEFAULT_MAX_NANO_CPUS by DIRECTOR_DEFAULT_MAX_NANO_CPUS --- .github/workflows/ci-testing-deploy.yml | 4 ++-- .../src/simcore_service_director/core/settings.py | 12 +++--------- services/director/tests/unit/test_producer.py | 4 ++-- tests/e2e-playwright/Makefile | 4 ++-- 4 files changed, 9 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index c32cd3a4ae7..378df3d218c 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -37,8 +37,8 @@ on: - all env: - DEFAULT_MAX_NANO_CPUS: 10000000 - DEFAULT_MAX_MEMORY: 268435456 + DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 COLUMNS: 120 concurrency: diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index cb6450277bf..ebb3a3c3281 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -10,7 +10,7 @@ PortInt, VersionTag, ) -from pydantic import ByteSize, Field, PositiveInt, parse_obj_as, validator +from pydantic import Field, NonNegativeInt, PositiveInt, validator from 
servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings @@ -73,14 +73,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) # migrated settings - DIRECTOR_DEFAULT_MAX_NANO_CPUS: PositiveInt = Field( - default=1 * pow(10, 9), - env=["DIRECTOR_DEFAULT_MAX_NANO_CPUS", "DEFAULT_MAX_NANO_CPUS"], - ) - DIRECTOR_DEFAULT_MAX_MEMORY: PositiveInt = Field( - default=parse_obj_as(ByteSize, "2GiB"), - env=["DIRECTOR_DEFAULT_MAX_MEMORY", "DEFAULT_MAX_MEMORY"], - ) + DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = Field(default=0) + DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = Field(default=0) DIRECTOR_REGISTRY_CACHING: bool = Field( default=True, description="cache the docker registry internally" ) diff --git a/services/director/tests/unit/test_producer.py b/services/director/tests/unit/test_producer.py index 673d52605eb..4b729c424bb 100644 --- a/services/director/tests/unit/test_producer.py +++ b/services/director/tests/unit/test_producer.py @@ -41,8 +41,8 @@ def ensure_service_runs_in_ci( return app_environment | setenvs_from_dict( monkeypatch, envs={ - "DEFAULT_MAX_MEMORY": f"{int(25 * pow(1024, 2))}", - "DEFAULT_MAX_NANO_CPUS": f"{int(0.01 * pow(10, 9))}", + "DIRECTOR_DEFAULT_MAX_MEMORY": f"{int(25 * pow(1024, 2))}", + "DIRECTOR_DEFAULT_MAX_NANO_CPUS": f"{int(0.01 * pow(10, 9))}", }, ) diff --git a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index 88a15a845d1..fc4c0463de2 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -18,8 +18,8 @@ define _up_simcore # set some parameters to allow for e2e to run echo LOGIN_REGISTRATION_INVITATION_REQUIRED=0 >> $(SIMCORE_DOT_ENV) echo LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0 >> $(SIMCORE_DOT_ENV) -echo DEFAULT_MAX_NANO_CPUS=1000000000 >> $(SIMCORE_DOT_ENV) -echo DEFAULT_MAX_MEMORY=134217728 >> $(SIMCORE_DOT_ENV) +echo 
DIRECTOR_DEFAULT_MAX_NANO_CPUS=1000000000 >> $(SIMCORE_DOT_ENV) +echo DIRECTOR_DEFAULT_MAX_MEMORY=134217728 >> $(SIMCORE_DOT_ENV) echo SIDECAR_FORCE_CPU_NODE=1 >> $(SIMCORE_DOT_ENV) $(MAKE_C) $(REPO_BASE_DIR) up-prod ops_ci=1 endef From 0428172312ea4aaccc3ed082a6d9293c9798c9d7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 15:15:28 +0100 Subject: [PATCH 185/201] envs --- .env-devel | 8 +- .../simcore_service_director/core/settings.py | 29 +++----- .../src/simcore_service_director/producer.py | 26 +------ .../simcore_service_director/system_utils.py | 15 ---- .../director/tests/unit/test_core_settings.py | 74 ++++++++++++++++--- .../director/tests/unit/test_system_utils.py | 46 ------------ services/docker-compose.yml | 22 ++---- 7 files changed, 85 insertions(+), 135 deletions(-) delete mode 100644 services/director/src/simcore_service_director/system_utils.py delete mode 100644 services/director/tests/unit/test_system_utils.py diff --git a/.env-devel b/.env-devel index 903bb49510f..fb513496b85 100644 --- a/.env-devel +++ b/.env-devel @@ -76,13 +76,10 @@ DIRECTOR_HOST=director DIRECTOR_LOGLEVEL=INFO DIRECTOR_MONITORING_ENABLED=True DIRECTOR_PORT=8000 -DIRECTOR_PUBLISHED_HOST_NAME= +DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081" DIRECTOR_REGISTRY_CACHING_TTL=900 DIRECTOR_REGISTRY_CACHING=True -DIRECTOR_SELF_SIGNED_SSL_FILENAME= -DIRECTOR_SELF_SIGNED_SSL_SECRET_ID= -DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME= -DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS= +DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null DIRECTOR_TRACING={} EFS_USER_ID=8006 @@ -194,7 +191,6 @@ REDIS_SECURE=false REDIS_USER=null REGISTRY_AUTH=True -REGISTRY_PATH="" REGISTRY_PW=adminadminadmin REGISTRY_SSL=True REGISTRY_URL=registry.osparc-master.speag.com diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index ebb3a3c3281..cf626ad6bd2 100644 --- 
a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -53,10 +53,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_REMOTE_DEBUG_PORT: PortInt = PortInt(3000) DIRECTOR_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, env=["DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] + ..., env=["DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"] ) DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, + ..., env=[ "DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED", @@ -76,20 +76,15 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = Field(default=0) DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = Field(default=0) DIRECTOR_REGISTRY_CACHING: bool = Field( - default=True, description="cache the docker registry internally" + ..., description="cache the docker registry internally" ) DIRECTOR_REGISTRY_CACHING_TTL: datetime.timedelta = Field( - default=datetime.timedelta(minutes=15), - description="cache time to live value (defaults to 15 minutes)", + ..., description="cache time to live value (defaults to 15 minutes)" ) - DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str = "" - DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[ - str, str - ] = Field(default_factory=dict) - DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: str = "" - DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: str = "" - DIRECTOR_SELF_SIGNED_SSL_FILENAME: str = "" + DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: str | None + + DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: dict[str, str] DIRECTOR_SERVICES_RESTART_POLICY_MAX_ATTEMPTS: int = 10 DIRECTOR_SERVICES_RESTART_POLICY_DELAY_S: int = 12 @@ -105,19 +100,15 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): description="settings for the private registry deployed with the platform", ) - DIRECTOR_EXTRA_HOSTS_SUFFIX: str = Field( - 
default="undefined", env=["DIRECTOR_EXTRA_HOSTS_SUFFIX", "EXTRA_HOSTS_SUFFIX"] - ) - DIRECTOR_POSTGRES: PostgresSettings = Field(auto_default_from_env=True) STORAGE_ENDPOINT: str = Field(..., description="storage endpoint without scheme") DIRECTOR_PUBLISHED_HOST_NAME: str = Field( - default="", env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] + ..., env=["DIRECTOR_PUBLISHED_HOST_NAME", "PUBLISHED_HOST_NAME"] ) DIRECTOR_SWARM_STACK_NAME: str = Field( - default="undefined-please-check", + ..., env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], ) @@ -129,7 +120,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # useful when developing with an alternative registry namespace DIRECTOR_MONITORING_ENABLED: bool = Field( - default=False, env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] + ..., env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] ) @validator("DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS") diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 431f13aa3c7..ff791a4066f 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -53,7 +53,6 @@ from .core.settings import ApplicationSettings, get_application_settings from .instrumentation import get_instrumentation from .services_common import ServicesCommonSettings -from .system_utils import get_system_extra_hosts_raw _logger = logging.getLogger(__name__) @@ -205,7 +204,6 @@ async def _create_docker_service_params( "SIMCORE_NODE_BASEPATH": node_base_path or "", "SIMCORE_HOST_NAME": service_name, }, - "Hosts": get_system_extra_hosts_raw(app_settings.DIRECTOR_EXTRA_HOSTS_SUFFIX), "Init": True, "Labels": { _to_simcore_runtime_docker_label_key("user_id"): user_id, @@ -226,28 +224,6 @@ async def _create_docker_service_params( "Mounts": [], } - if ( - 
app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME - and app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID - and app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME - ): - # Note: this is useful for S3 client in case of self signed certificate - container_spec["Env"][ - "SSL_CERT_FILE" - ] = app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME - container_spec["Secrets"] = [ - { - "SecretID": app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_ID, - "SecretName": app_settings.DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME, - "File": { - "Name": app_settings.DIRECTOR_SELF_SIGNED_SSL_FILENAME, - "Mode": 444, - "UID": "0", - "GID": "0", - }, - } - ] - # SEE https://docs.docker.com/engine/api/v1.41/#operation/ServiceCreate docker_params: dict[str, Any] = { "auth": ( @@ -586,7 +562,7 @@ async def _pass_port_to_service( ) service_url = "http://" + service_name + "/" + route # NOSONAR query_string = { - "hostname": str(app_settings.DIRECTOR_PUBLISHED_HOST_NAME), + "hostname": app_settings.DIRECTOR_PUBLISHED_HOST_NAME, "port": str(port), } _logger.debug("creating request %s and query %s", service_url, query_string) diff --git a/services/director/src/simcore_service_director/system_utils.py b/services/director/src/simcore_service_director/system_utils.py deleted file mode 100644 index ce666108dd2..00000000000 --- a/services/director/src/simcore_service_director/system_utils.py +++ /dev/null @@ -1,15 +0,0 @@ -from pathlib import Path - - -def get_system_extra_hosts_raw(extra_host_domain: str) -> list[str]: - extra_hosts = [] - hosts_path = Path("/etc/hosts") - if hosts_path.exists() and extra_host_domain != "undefined": - with hosts_path.open() as hosts: - extra_hosts = [ - line.strip().replace("\t", " ") - for line in hosts - if extra_host_domain in line - ] - - return extra_hosts diff --git a/services/director/tests/unit/test_core_settings.py b/services/director/tests/unit/test_core_settings.py index 6cd0f49ff84..3f4ea62a480 100644 --- a/services/director/tests/unit/test_core_settings.py +++ 
b/services/director/tests/unit/test_core_settings.py @@ -5,7 +5,10 @@ import pytest -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_envfile +from pytest_simcore.helpers.monkeypatch_envs import ( + setenvs_from_dict, + setenvs_from_envfile, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_director.core.settings import ApplicationSettings @@ -24,19 +27,21 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): assert settings == ApplicationSettings.create_from_envs() assert ( - app_environment["DIRECTOR_DEFAULT_MAX_MEMORY"] + str( + app_environment.get( + "DIRECTOR_DEFAULT_MAX_MEMORY", + ApplicationSettings.__fields__["DIRECTOR_DEFAULT_MAX_MEMORY"].default, + ) + ) == f"{settings.DIRECTOR_DEFAULT_MAX_MEMORY}" ) -@pytest.mark.skip(reason="under dev") -def test_mytests(monkeypatch: pytest.MonkeyPatch): +def test_docker_container_env_sample(monkeypatch: pytest.MonkeyPatch): setenvs_from_envfile( monkeypatch, """ - DEFAULT_MAX_MEMORY=0 - DEFAULT_MAX_NANO_CPUS=0 DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS={} DIRECTOR_REGISTRY_CACHING=True DIRECTOR_REGISTRY_CACHING_TTL=900 @@ -63,22 +68,22 @@ def test_mytests(monkeypatch: pytest.MonkeyPatch): PWD=/home/scu PYTHONDONTWRITEBYTECODE=1 PYTHONOPTIMIZE=TRUE - PYTHON_GET_PIP_SHA256=b3153ec0cf7b7bbf9556932aa37e4981c35dc2a2c501d70d91d2795aa532be79 + PYTHON_GET_PIP_SHA256=adsfasdf PYTHON_GET_PIP_URL=https://github.com/pypa/get-pip/raw/eff16c878c7fd6b688b9b4c4267695cf1a0bf01b/get-pip.py PYTHON_PIP_VERSION=20.1.1 PYTHON_VERSION=3.6.10 REGISTRY_AUTH=True REGISTRY_PATH= - REGISTRY_PW=adminadminadmin + REGISTRY_PW=adsfasdf REGISTRY_SSL=True REGISTRY_URL=registry.osparc-master.speag.com REGISTRY_USER=admin REGISTRY_VERSION=v2 - S3_ACCESS_KEY=YE2F1H88P2Z51GYX7HCV + S3_ACCESS_KEY=adsfasdf S3_BUCKET_NAME=master-simcore S3_ENDPOINT=https://ceph-prod-rgw.speag.com S3_REGION=us-east-1 - S3_SECRET_KEY=7CXBx2HTy6NrVPClatbB6bWZcM1zx782Y7mAaoPs + 
S3_SECRET_KEY=asdf SC_BOOT_MODE=production SC_BUILD_TARGET=production SC_USER_ID=8004 @@ -92,7 +97,56 @@ def test_mytests(monkeypatch: pytest.MonkeyPatch): TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE=50 TRAEFIK_SIMCORE_ZONE=master_internal_simcore_stack VIRTUAL_ENV=/home/scu/.venv + LOG_FORMAT_LOCAL_DEV_ENABLED=1 """, ) settings = ApplicationSettings.create_from_envs() + + assert settings.DIRECTOR_DEFAULT_MAX_MEMORY == 0, "default!" + + +def test_docker_compose_environment_sample( + monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict +): + + setenvs_from_dict( + monkeypatch, + { + **app_environment, + "DEFAULT_MAX_MEMORY": "0", + "DEFAULT_MAX_NANO_CPUS": "0", + "DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS": '{"VRAM": "node.labels.gpu==true"}', + "DIRECTOR_REGISTRY_CACHING": "True", + "DIRECTOR_REGISTRY_CACHING_TTL": "900", + "DIRECTOR_SELF_SIGNED_SSL_FILENAME": "", + "DIRECTOR_SELF_SIGNED_SSL_SECRET_ID": "", + "DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME": "", + "DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS": "", + "DIRECTOR_TRACING": "{}", + "EXTRA_HOSTS_SUFFIX": "undefined", + "LOGLEVEL": "DEBUG", + "MONITORING_ENABLED": "True", + "POSTGRES_DB": "simcoredb", + "POSTGRES_ENDPOINT": "osparc-dev.foo.com:5432", + "POSTGRES_HOST": "osparc-dev.foo.com", + "POSTGRES_PASSWORD": "adsfasdf", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "postgres", + "PUBLISHED_HOST_NAME": "osparc-master-zmt.click", + "REGISTRY_AUTH": "True", + "REGISTRY_PATH": "", + "REGISTRY_PW": "asdf", + "REGISTRY_SSL": "True", + "REGISTRY_URL": "registry.osparc-master-zmt.click", + "REGISTRY_USER": "admin", + "SIMCORE_SERVICES_NETWORK_NAME": "master-simcore_interactive_services_subnet", + "STORAGE_ENDPOINT": "master_storage:8080", + "SWARM_STACK_NAME": "master-simcore", + "TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT": "http://jaeger:4318", + "TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE": "50", + "TRAEFIK_SIMCORE_ZONE": "master_internal_simcore_stack", + }, + ) + + 
settings = ApplicationSettings.create_from_envs() diff --git a/services/director/tests/unit/test_system_utils.py b/services/director/tests/unit/test_system_utils.py deleted file mode 100644 index 522549cf561..00000000000 --- a/services/director/tests/unit/test_system_utils.py +++ /dev/null @@ -1,46 +0,0 @@ -from unittest.mock import mock_open, patch - -from simcore_service_director.system_utils import get_system_extra_hosts_raw - - -# Sample tests -def test_get_system_extra_hosts_raw_with_matching_domain(): - # Simulate the contents of /etc/hosts - mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" - extra_host_domain = "example.com" - - with patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( - "pathlib.Path.exists", return_value=True - ): - result = get_system_extra_hosts_raw(extra_host_domain) - assert result == ["192.168.1.1 example.com"] - - -def test_get_system_extra_hosts_raw_with_no_matching_domain(): - mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" - extra_host_domain = "nonexistent.com" - - with patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( - "pathlib.Path.exists", return_value=True - ): - result = get_system_extra_hosts_raw(extra_host_domain) - assert result == [] - - -def test_get_system_extra_hosts_raw_with_undefined_domain(): - mocked_hosts_content = "127.0.0.1\tlocalhost\n192.168.1.1\texample.com\n" - extra_host_domain = "undefined" - - with patch("pathlib.Path.open", mock_open(read_data=mocked_hosts_content)), patch( - "pathlib.Path.exists", return_value=True - ): - result = get_system_extra_hosts_raw(extra_host_domain) - assert result == [] - - -def test_get_system_extra_hosts_raw_with_no_hosts_file(): - extra_host_domain = "example.com" - - with patch("pathlib.Path.exists", return_value=False): - result = get_system_extra_hosts_raw(extra_host_domain) - assert result == [] diff --git a/services/docker-compose.yml b/services/docker-compose.yml 
index 5e57dcd04d8..58ed51fbc10 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -258,34 +258,28 @@ services: DIRECTOR_PUBLISHED_HOST_NAME: ${DIRECTOR_PUBLISHED_HOST_NAME} DIRECTOR_REGISTRY_CACHING_TTL: ${DIRECTOR_REGISTRY_CACHING_TTL} DIRECTOR_REGISTRY_CACHING: ${DIRECTOR_REGISTRY_CACHING} - DIRECTOR_SELF_SIGNED_SSL_FILENAME: ${DIRECTOR_SELF_SIGNED_SSL_FILENAME} - DIRECTOR_SELF_SIGNED_SSL_SECRET_ID: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_ID} - DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME: ${DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME} DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} DIRECTOR_TRACING: ${DIRECTOR_TRACING} - EXTRA_HOSTS_SUFFIX: undefined + POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} POSTGRES_HOST: ${POSTGRES_HOST} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} POSTGRES_PORT: ${POSTGRES_PORT} POSTGRES_USER: ${POSTGRES_USER} + REGISTRY_AUTH: ${REGISTRY_AUTH} - REGISTRY_PATH: ${REGISTRY_PATH} REGISTRY_PW: ${REGISTRY_PW} REGISTRY_SSL: ${REGISTRY_SSL} REGISTRY_URL: ${REGISTRY_URL} REGISTRY_USER: ${REGISTRY_USER} - S3_ACCESS_KEY: ${S3_ACCESS_KEY} - S3_BUCKET_NAME: ${S3_BUCKET_NAME} - S3_ENDPOINT: ${S3_ENDPOINT} - S3_REGION: ${S3_REGION} - S3_SECRET_KEY: ${S3_SECRET_KEY} + SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} - SWARM_STACK_NAME: ${SWARM_STACK_NAME:-simcore} - TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} + SWARM_STACK_NAME: ${SWARM_STACK_NAME} + + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} volumes: - "/var/run/docker.sock:/var/run/docker.sock" From 
fbc1f75806ed8d34b98dfca606472cbf7d04c97b Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 15:21:32 +0100 Subject: [PATCH 186/201] fixes dockercompose env fixture --- .../src/pytest_simcore/environment_configs.py | 4 ++-- .../src/simcore_service_director/core/settings.py | 8 +++----- services/docker-compose.yml | 2 +- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index 33a38db4c7e..e8b34d4aa1b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -102,7 +102,7 @@ def docker_compose_service_environment_dict( """ service = services_docker_compose_dict["services"][service_name] - def _substitute(key, value): + def _substitute(key, value) -> tuple[str, str]: if m := re.match(r"\${([^{}:-]\w+)", value): expected_env_var = m.group(1) try: @@ -114,7 +114,7 @@ def _substitute(key, value): pytest.fail( f"{expected_env_var} is not defined in {env_devel_file} but used in docker-compose services[{service}].environment[{key}]" ) - return None + return key, value envs: EnvVarsDict = {} for key, value in service.get("environment", {}).items(): diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index cf626ad6bd2..55ce270cc02 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -72,7 +72,6 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): auto_default_from_env=True, description="settings for opentelemetry tracing" ) - # migrated settings DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = Field(default=0) DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = 
Field(default=0) DIRECTOR_REGISTRY_CACHING: bool = Field( @@ -91,7 +90,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): DIRECTOR_SERVICES_STATE_MONITOR_S: int = 8 DIRECTOR_TRAEFIK_SIMCORE_ZONE: str = Field( - default="internal_simcore_stack", + ..., env=["DIRECTOR_TRAEFIK_SIMCORE_ZONE", "TRAEFIK_SIMCORE_ZONE"], ) @@ -112,12 +111,11 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): env=["DIRECTOR_SWARM_STACK_NAME", "SWARM_STACK_NAME"], ) - # used to find the right network name DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( - default=None, + # used to find the right network name + ..., env=["DIRECTOR_SIMCORE_SERVICES_NETWORK_NAME", "SIMCORE_SERVICES_NETWORK_NAME"], ) - # useful when developing with an alternative registry namespace DIRECTOR_MONITORING_ENABLED: bool = Field( ..., env=["DIRECTOR_MONITORING_ENABLED", "MONITORING_ENABLED"] diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 58ed51fbc10..7629b6f7373 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -280,7 +280,7 @@ services: TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} - TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} + TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE} volumes: - "/var/run/docker.sock:/var/run/docker.sock" deploy: From d0e7a06e7ee3a485a6effd34cc4cf30e3a9340e7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 15:35:39 +0100 Subject: [PATCH 187/201] doc --- .env-devel | 2 +- .../simcore_service_director/core/settings.py | 35 +++---------------- .../registry_proxy.py | 10 ++++-- 3 files changed, 13 insertions(+), 34 deletions(-) diff --git a/.env-devel b/.env-devel index fb513496b85..c8850bd848f 100644 --- a/.env-devel +++ b/.env-devel @@ -77,7 +77,7 @@ DIRECTOR_LOGLEVEL=INFO 
DIRECTOR_MONITORING_ENABLED=True DIRECTOR_PORT=8000 DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081" -DIRECTOR_REGISTRY_CACHING_TTL=900 +DIRECTOR_REGISTRY_CACHING_TTL=00:15:00 DIRECTOR_REGISTRY_CACHING=True DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null DIRECTOR_TRACING={} diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 55ce270cc02..e0a352aed82 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -3,16 +3,10 @@ from typing import cast from fastapi import FastAPI -from models_library.basic_types import ( - BootModeEnum, - BuildTargetEnum, - LogLevel, - PortInt, - VersionTag, -) -from pydantic import Field, NonNegativeInt, PositiveInt, validator +from models_library.basic_types import LogLevel, PortInt, VersionTag +from pydantic import Field, NonNegativeInt, validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring -from settings_library.base import BaseCustomSettings +from settings_library.application import BaseApplicationSettings from settings_library.docker_registry import RegistrySettings from settings_library.postgres import PostgresSettings from settings_library.tracing import TracingSettings @@ -21,32 +15,11 @@ from .._meta import API_VERSION, API_VTAG, APP_NAME -class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): - # CODE STATICS --------------------------------------------------------- +class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): API_VERSION: str = API_VERSION APP_NAME: str = APP_NAME API_VTAG: VersionTag = API_VTAG - # IMAGE BUILDTIME ------------------------------------------------------ - # @Makefile - SC_BUILD_DATE: str | None = None - SC_BUILD_TARGET: BuildTargetEnum | None = None - SC_VCS_REF: str | None = None - SC_VCS_URL: str | None = None - - # @Dockerfile - SC_BOOT_MODE: BootModeEnum | 
None = None - SC_BOOT_TARGET: BuildTargetEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed." - "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) - SC_USER_ID: int | None = None - SC_USER_NAME: str | None = None - - # RUNTIME ----------------------------------------------------------- DIRECTOR_DEBUG: bool = Field( default=False, description="Debug mode", env=["DIRECTOR_DEBUG", "DEBUG"] ) diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 0994292ab99..f45f3b96348 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ b/services/director/src/simcore_service_director/registry_proxy.py @@ -42,6 +42,11 @@ logger = logging.getLogger(__name__) +# +# NOTE: if you are refactoring this module, +# please consider reusing packages/pytest-simcore/src/pytest_simcore/helpers/docker_registry.py +# + class ServiceType(enum.Enum): ALL = "" @@ -375,10 +380,11 @@ async def get_image_details( async def get_repo_details(app: FastAPI, image_key: str) -> list[dict[str, Any]]: image_tags = await list_image_tags(app, image_key) + results = await limited_gather( *[get_image_details(app, image_key, tag) for tag in image_tags], reraise=False, - _logger=logger, + log=logger, limit=_MAX_CONCURRENT_CALLS, ) return [result for result in results if not isinstance(result, BaseException)] @@ -400,7 +406,7 @@ async def list_services(app: FastAPI, service_type: ServiceType) -> list[dict]: results = await limited_gather( *[get_repo_details(app, repo) for repo in repos], reraise=False, - _logger=logger, + log=logger, limit=_MAX_CONCURRENT_CALLS, ) From a501847c5e4f20fcc893487bbccdbb3d48b5043e Mon Sep 17 00:00:00 2001 From: pcrespov 
<32402063+pcrespov@users.noreply.github.com> Date: Thu, 14 Nov 2024 23:00:13 +0100 Subject: [PATCH 188/201] @mrnicegyu11 review: updates 3.11 --- packages/aws-library/setup.py | 2 +- packages/dask-task-models-library/setup.py | 2 +- packages/models-library/setup.py | 2 +- packages/notifications-library/setup.py | 2 +- packages/pytest-simcore/setup.py | 2 +- packages/service-integration/setup.py | 2 +- packages/service-library/setup.py | 2 +- packages/simcore-sdk/setup.py | 2 +- services/agent/setup.py | 2 +- services/api-server/setup.py | 2 +- services/autoscaling/setup.py | 2 +- services/catalog/setup.py | 2 +- services/clusters-keeper/setup.py | 2 +- services/dask-sidecar/setup.py | 2 +- services/datcore-adapter/setup.py | 2 +- services/director-v2/setup.py | 2 +- services/director/setup.py | 2 +- services/dynamic-scheduler/setup.py | 2 +- services/dynamic-sidecar/setup.py | 2 +- services/efs-guardian/setup.py | 2 +- services/invitations/setup.py | 2 +- services/osparc-gateway-server/setup.py | 2 +- services/payments/setup.py | 2 +- services/resource-usage-tracker/setup.py | 2 +- services/storage/setup.py | 2 +- services/web/server/setup.py | 2 +- 26 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/aws-library/setup.py b/packages/aws-library/setup.py index 32b5ea71e93..30ef0981954 100644 --- a/packages/aws-library/setup.py +++ b/packages/aws-library/setup.py @@ -33,7 +33,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for AWS APIs", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/dask-task-models-library/setup.py b/packages/dask-task-models-library/setup.py index f6a1e1c1f45..159e9aabdc7 100644 --- a/packages/dask-task-models-library/setup.py +++ 
b/packages/dask-task-models-library/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for simcore pydantic dask task models", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/models-library/setup.py b/packages/models-library/setup.py index f93dd240462..439d6b7fc41 100644 --- a/packages/models-library/setup.py +++ b/packages/models-library/setup.py @@ -34,7 +34,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Sylvain Anderegg (sanderegg)", "description": "Core service library for simcore pydantic models", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/notifications-library/setup.py b/packages/notifications-library/setup.py index 5488afbbcc5..2a94424c29a 100644 --- a/packages/notifications-library/setup.py +++ b/packages/notifications-library/setup.py @@ -38,7 +38,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Pedro Crespo-Valero (pcrespov)", "description": "simcore library for user notifications e.g. 
emails, sms, etc", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "classifiers": [ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", diff --git a/packages/pytest-simcore/setup.py b/packages/pytest-simcore/setup.py index aeeaca60b0e..3bfff70d2ee 100644 --- a/packages/pytest-simcore/setup.py +++ b/packages/pytest-simcore/setup.py @@ -17,7 +17,7 @@ "description": "pytest plugin with fixtures and test helpers for osparc-simcore repo modules", "py_modules": ["pytest_simcore"], # WARNING: this is used in frozen services as well !!!! - "python_requires": ">=3.6", + "python_requires": "~=3.11", "install_requires": ["pytest>=3.5.0"], "extras_require": { "all": [ diff --git a/packages/service-integration/setup.py b/packages/service-integration/setup.py index dac440b0f9d..4dc34180e23 100644 --- a/packages/service-integration/setup.py +++ b/packages/service-integration/setup.py @@ -64,7 +64,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "Framework :: Pytest", ], "long_description": Path(CURRENT_DIR / "README.md").read_text(), - "python_requires": ">=3.6", + "python_requires": "~=3.11", "license": "MIT license", "install_requires": INSTALL_REQUIREMENTS, "packages": find_packages(where="src"), diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index ced858bd59d..521b491b918 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -35,7 +35,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "author": "Pedro Crespo-Valero (pcrespov)", "description": "Core service library for simcore (or servicelib)", "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "install_requires": tuple(PROD_REQUIREMENTS), "packages": find_packages(where="src"), "package_data": {"": ["py.typed"]}, diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index cd763f3ac3a..cb7c8f88edf 100644 --- a/packages/simcore-sdk/setup.py +++ 
b/packages/simcore-sdk/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "packages": find_packages(where="src"), "package_data": {"": ["py.typed"]}, "package_dir": {"": "src"}, - "python_requires": ">=3.6", + "python_requires": "~=3.11", "install_requires": INSTALL_REQUIREMENTS, "tests_require": TEST_REQUIREMENTS, "extras_require": {"test": TEST_REQUIREMENTS}, diff --git a/services/agent/setup.py b/services/agent/setup.py index 22c1b7ae1d7..6745e6db31d 100755 --- a/services/agent/setup.py +++ b/services/agent/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/api-server/setup.py b/services/api-server/setup.py index 8acef0b60b1..ca8eeb93434 100755 --- a/services/api-server/setup.py +++ b/services/api-server/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/autoscaling/setup.py b/services/autoscaling/setup.py index 2cdde15f3ed..516747aa082 100755 --- a/services/autoscaling/setup.py +++ b/services/autoscaling/setup.py @@ -49,7 +49,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/catalog/setup.py b/services/catalog/setup.py index 9aae69d9a65..cd21924e00c 100644 --- a/services/catalog/setup.py +++ b/services/catalog/setup.py @@ -43,7 +43,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": "Manages and 
maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/clusters-keeper/setup.py b/services/clusters-keeper/setup.py index df644386545..1caa625e8b6 100755 --- a/services/clusters-keeper/setup.py +++ b/services/clusters-keeper/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/dask-sidecar/setup.py b/services/dask-sidecar/setup.py index 3299eab5a6c..ed223f163a6 100644 --- a/services/dask-sidecar/setup.py +++ b/services/dask-sidecar/setup.py @@ -36,7 +36,7 @@ def read_reqs(reqs_path: Path) -> set[str]: ], "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/datcore-adapter/setup.py b/services/datcore-adapter/setup.py index c08158cf963..c9f49735bd0 100644 --- a/services/datcore-adapter/setup.py +++ b/services/datcore-adapter/setup.py @@ -37,7 +37,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": "Interfaces with datcore storage", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/director-v2/setup.py b/services/director-v2/setup.py index d1f9ffa19ec..0f8aa66e5cc 100644 --- a/services/director-v2/setup.py +++ b/services/director-v2/setup.py @@ -47,7 +47,7 @@ def 
read_reqs(reqs_path: Path) -> set[str]: "description": "Orchestrates the pipeline of services defined by the user", "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/director/setup.py b/services/director/setup.py index 4522abb4e1d..9577a7ffc86 100644 --- a/services/director/setup.py +++ b/services/director/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/dynamic-scheduler/setup.py b/services/dynamic-scheduler/setup.py index 5ca9677bd20..6cde52f1491 100755 --- a/services/dynamic-scheduler/setup.py +++ b/services/dynamic-scheduler/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/dynamic-sidecar/setup.py b/services/dynamic-sidecar/setup.py index 83a6b2abc06..a980edda233 100644 --- a/services/dynamic-sidecar/setup.py +++ b/services/dynamic-sidecar/setup.py @@ -49,7 +49,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "": "src", }, "include_package_data": True, - "python_requires": "~=3.10", + "python_requires": "~=3.11", "PROD_REQUIREMENTS": PROD_REQUIREMENTS, "TEST_REQUIREMENTS": TEST_REQUIREMENTS, "setup_requires": ["setuptools_scm"], diff --git a/services/efs-guardian/setup.py b/services/efs-guardian/setup.py index ed3f29fc23b..1a0f0921ee3 100755 --- a/services/efs-guardian/setup.py +++ b/services/efs-guardian/setup.py @@ -46,7 +46,7 @@ def read_reqs(reqs_path: Path) -> 
set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/invitations/setup.py b/services/invitations/setup.py index 4f053c504ac..9e9cf583c0c 100755 --- a/services/invitations/setup.py +++ b/services/invitations/setup.py @@ -44,7 +44,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/osparc-gateway-server/setup.py b/services/osparc-gateway-server/setup.py index 531804628f8..c3a7becc072 100755 --- a/services/osparc-gateway-server/setup.py +++ b/services/osparc-gateway-server/setup.py @@ -37,7 +37,7 @@ def read_reqs(reqs_path: Path) -> set[str]: ], "long_description": (CURRENT_DIR / "README.md").read_text(), "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/payments/setup.py b/services/payments/setup.py index 234334fa2ab..c1f3fa64313 100755 --- a/services/payments/setup.py +++ b/services/payments/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/resource-usage-tracker/setup.py b/services/resource-usage-tracker/setup.py index ce2abba82a1..26afa3e2183 100755 --- a/services/resource-usage-tracker/setup.py +++ b/services/resource-usage-tracker/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "description": DESCRIPTION, "long_description": README, "license": "MIT 
license", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": { "": "src", diff --git a/services/storage/setup.py b/services/storage/setup.py index 792ff4bebcd..2a0ca0d9c41 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -38,7 +38,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "description": "Service to manage data storage in simcore", "author": "Manuel Guidon (mguidon)", - "python_requires": "~=3.10", + "python_requires": "~=3.11", "packages": find_packages(where="src"), "package_dir": {"": "src"}, "include_package_data": True, diff --git a/services/web/server/setup.py b/services/web/server/setup.py index aba3c322d87..57ada0bc03b 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -60,7 +60,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "simcore-service=simcore_service_webserver.__main__:main", ] }, - "python_requires": "~=3.10", + "python_requires": "~=3.11", "install_requires": INSTALL_REQUIREMENTS, "tests_require": TEST_REQUIREMENTS, "setup_requires": ["pytest-runner"], From 4e390911e052445f79a2e807a8868242c89de2ad Mon Sep 17 00:00:00 2001 From: pcrespov <32402063+pcrespov@users.noreply.github.com> Date: Thu, 14 Nov 2024 23:14:52 +0100 Subject: [PATCH 189/201] missing upgrade --- services/director/requirements/_base.txt | 53 +++++++++++++---------- services/director/requirements/_test.txt | 9 ++-- services/director/requirements/_tools.txt | 10 ++--- 3 files changed, 41 insertions(+), 31 deletions(-) diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index 1382ab713d0..f88e7c85550 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -14,7 +14,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.4.3 # via aiohttp 
-aiohttp==3.10.10 +aiohttp==3.11.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -88,13 +88,13 @@ fastapi==0.99.1 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # prometheus-fastapi-instrumentator -faststream==0.5.28 +faststream==0.5.30 # via -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.66.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -155,7 +155,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.0 +opentelemetry-api==1.28.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -163,56 +163,62 @@ opentelemetry-api==1.28.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.0 +opentelemetry-exporter-otlp==1.28.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.0 +opentelemetry-exporter-otlp-proto-common==1.28.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.0 +opentelemetry-exporter-otlp-proto-grpc==1.28.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.0 +opentelemetry-exporter-otlp-proto-http==1.28.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b0 
+opentelemetry-instrumentation==0.49b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-asgi==0.49b0 +opentelemetry-instrumentation-asgi==0.49b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.49b0 +opentelemetry-instrumentation-fastapi==0.49b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-redis==0.49b0 +opentelemetry-instrumentation-httpx==0.49b1 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-redis==0.49b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b0 +opentelemetry-instrumentation-requests==0.49b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.0 +opentelemetry-proto==1.28.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.0 +opentelemetry-sdk==1.28.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b0 +opentelemetry-semantic-conventions==0.49b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b0 +opentelemetry-util-http==0.49b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests 
orjson==3.10.11 # via @@ -225,7 +231,7 @@ orjson==3.10.11 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # fastapi -packaging==24.1 +packaging==24.2 # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq @@ -237,7 +243,9 @@ prometheus-client==0.21.0 prometheus-fastapi-instrumentator==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in propcache==0.2.0 - # via yarl + # via + # aiohttp + # yarl protobuf==5.28.3 # via # googleapis-common-protos @@ -332,7 +340,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.5 +typer==0.13.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -372,17 +380,18 @@ uvloop==0.21.0 # via uvicorn watchfiles==0.24.0 # via uvicorn -websockets==13.1 +websockets==14.1 # via uvicorn wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis yarl==1.17.1 # via # aio-pika # aiohttp # aiormq -zipp==3.20.2 +zipp==3.21.0 # via importlib-metadata diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 52e2969fff0..8d14d466266 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.4.3 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.10.10 +aiohttp==3.11.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -35,11 +35,11 @@ charset-normalizer==3.4.0 # via # -c requirements/_base.txt # requests -coverage==7.6.4 
+coverage==7.6.5 # via pytest-cov docker==7.1.0 # via -r requirements/_test.in -faker==30.8.2 +faker==33.0.0 # via -r requirements/_test.in frozenlist==1.5.0 # via @@ -75,7 +75,7 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -packaging==24.1 +packaging==24.2 # via # -c requirements/_base.txt # pytest @@ -85,6 +85,7 @@ pluggy==1.5.0 propcache==0.2.0 # via # -c requirements/_base.txt + # aiohttp # yarl pytest==8.3.3 # via diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index 4270bf693f1..815963069c8 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -19,7 +19,7 @@ distlib==0.3.9 # via virtualenv filelock==3.16.1 # via virtualenv -identify==2.6.1 +identify==2.6.2 # via pre-commit isort==5.13.2 # via @@ -35,7 +35,7 @@ mypy-extensions==1.0.0 # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -66,9 +66,9 @@ pyyaml==6.0.2 # -c requirements/_base.txt # pre-commit # watchdog -ruff==0.7.2 +ruff==0.7.3 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.3.0 +setuptools==75.5.0 # via pip-tools tomlkit==0.13.2 # via pylint @@ -81,5 +81,5 @@ virtualenv==20.27.1 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in -wheel==0.44.0 +wheel==0.45.0 # via pip-tools From 726ba0c1327e16f838f0dc831e8e57ad366159f1 Mon Sep 17 00:00:00 2001 From: pcrespov <32402063+pcrespov@users.noreply.github.com> Date: Fri, 15 Nov 2024 01:09:12 +0100 Subject: [PATCH 190/201] rm envs --- .github/workflows/ci-testing-deploy.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 378df3d218c..45364b8409b 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -36,11 +36,6 @@ on: - system-tests - all -env: - 
DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 - COLUMNS: 120 - concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true From 10c315726ca568e2bcedf0f7e6ae791ba99f7901 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 15 Nov 2024 13:33:14 +0100 Subject: [PATCH 191/201] env vars --- .github/workflows/ci-testing-deploy.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 378df3d218c..14f9c285b0a 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -37,8 +37,8 @@ on: - all env: - DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 + # NOTE: 'COLUMNS' is a shell env var that represents the width (number of columns) + # of the terminal or command-line interface in characters. COLUMNS: 120 concurrency: @@ -2146,6 +2146,9 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false + env: + DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 steps: - uses: actions/checkout@v4 - name: setup docker buildx From c202c3da32ede7922ba4b1ee1d191ea01665e9dd Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 15 Nov 2024 16:53:59 +0100 Subject: [PATCH 192/201] envs --- .github/workflows/ci-testing-deploy.yml | 6 +++--- services/director/tests/unit/test_core_settings.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 14f9c285b0a..44e8016ce37 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -37,6 +37,9 @@ on: - all env: + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + 
DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 # NOTE: 'COLUMNS' is a shell env var that represents the width (number of columns) # of the terminal or command-line interface in characters. COLUMNS: 120 @@ -2146,9 +2149,6 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false - env: - DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 steps: - uses: actions/checkout@v4 - name: setup docker buildx diff --git a/services/director/tests/unit/test_core_settings.py b/services/director/tests/unit/test_core_settings.py index 3f4ea62a480..5ac622ba668 100644 --- a/services/director/tests/unit/test_core_settings.py +++ b/services/director/tests/unit/test_core_settings.py @@ -38,6 +38,7 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): def test_docker_container_env_sample(monkeypatch: pytest.MonkeyPatch): + monkeypatch.delenv("DIRECTOR_DEFAULT_MAX_MEMORY", raising=False) setenvs_from_envfile( monkeypatch, From 08a837c303686f0caee9315b54e2b828e917fec2 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Sat, 16 Nov 2024 17:16:24 +0100 Subject: [PATCH 193/201] minor cleanup --- .github/workflows/ci-testing-deploy.yml | 13 +++- ...ixed_dynamic_sidecar_and_legacy_project.py | 65 ++++++++++--------- 2 files changed, 44 insertions(+), 34 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 44e8016ce37..0ffc79d86a4 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -37,9 +37,6 @@ on: - all env: - # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service - DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 # NOTE: 'COLUMNS' is a shell env var that represents the width (number of columns) # of the terminal or command-line interface in characters. 
COLUMNS: 120 @@ -2086,6 +2083,11 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false + env: + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 + DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_TRACING: null steps: - uses: actions/checkout@v4 - name: setup docker buildx @@ -2149,6 +2151,11 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false + env: + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 + DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 + DIRECTOR_TRACING: null steps: - uses: actions/checkout@v4 - name: setup docker buildx diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 4d7c348a336..f2f83e4e022 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -1,13 +1,13 @@ -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments -# pylint:disable=too-many-positional-arguments -# pylint:disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments import asyncio import logging from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable from contextlib import asynccontextmanager -from typing import Any +from typing import Any, cast from unittest import mock import aiodocker @@ -46,8 +46,8 @@ "migration", "postgres", "rabbit", - "storage", "redis", + "storage", ] pytest_simcore_ops_services_selection = [ @@ -79,29 +79,32 @@ def mock_env( catalog_port = services_endpoint["catalog"].port assert catalog_port - env_vars: EnvVarsDict = { - 
"DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS": "{}", - "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", - "SWARM_STACK_NAME": "pytest-simcore", - "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", - "SC_BOOT_MODE": "production", - "DYNAMIC_SIDECAR_EXPOSE_PORT": "true", - "PROXY_EXPOSE_PORT": "true", - "SIMCORE_SERVICES_NETWORK_NAME": network_name, - "DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED": "true", - "POSTGRES_HOST": f"{get_localhost_ip()}", - "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "false", - "COMPUTATIONAL_BACKEND_ENABLED": "false", - "R_CLONE_PROVIDER": "MINIO", - "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", - "DIRECTOR_HOST": director_host, - "DIRECTOR_PORT": f"{director_port}", - "CATALOG_HOST": catalog_host, - "CATALOG_PORT": f"{catalog_port}", - } - setenvs_from_dict(monkeypatch, env_vars) monkeypatch.delenv("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", raising=False) - return mock_env | env_vars + mock_env.pop("DYNAMIC_SIDECAR_MOUNT_PATH_DEV", None) + + return mock_env | setenvs_from_dict( + monkeypatch, + { + "DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS": "{}", + "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", + "SWARM_STACK_NAME": "pytest-simcore", + "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", + "SC_BOOT_MODE": "production", + "DYNAMIC_SIDECAR_EXPOSE_PORT": "true", + "PROXY_EXPOSE_PORT": "true", + "SIMCORE_SERVICES_NETWORK_NAME": network_name, + "DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED": "true", + "POSTGRES_HOST": f"{get_localhost_ip()}", + "COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED": "false", + "COMPUTATIONAL_BACKEND_ENABLED": "false", + "R_CLONE_PROVIDER": "MINIO", + "DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED": "1", + "DIRECTOR_HOST": director_host, + "DIRECTOR_PORT": f"{director_port}", + "CATALOG_HOST": catalog_host, + "CATALOG_PORT": f"{catalog_port}", + }, + ) @pytest.fixture @@ -117,17 +120,17 @@ def minimal_configuration( @pytest.fixture def uuid_legacy(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture def 
uuid_dynamic_sidecar(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture def uuid_dynamic_sidecar_compose(faker: Faker) -> str: - return faker.uuid4() + return cast(str, faker.uuid4()) @pytest.fixture From 309e2717f511f36b6259e40e2b5bae397f715780 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:27:22 +0100 Subject: [PATCH 194/201] disables tracing for integration tests --- .github/workflows/ci-testing-deploy.yml | 5 --- .../src/pytest_simcore/docker_compose.py | 44 ++++++++++++------- ...ixed_dynamic_sidecar_and_legacy_project.py | 3 +- tests/swarm-deploy/conftest.py | 6 +-- 4 files changed, 32 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 0ffc79d86a4..e143ec83976 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -2083,11 +2083,6 @@ jobs: python: ["3.11"] os: [ubuntu-22.04] fail-fast: false - env: - # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service - DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 - DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_TRACING: null steps: - uses: actions/checkout@v4 - name: setup docker buildx diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 581fcf30187..2da02b3ade9 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -45,10 +45,13 @@ def temp_folder( @pytest.fixture(scope="session") -def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: +def env_vars_for_docker_compose(env_devel_file: Path) -> EnvVarsDict: """ - Loads and extends .env-devel returning - all environment variables key=value + Loads and extends .env-devel returning all environment variables 
key=value + + + NOTE: that these are then env-vars used in the services started in the + integration tests! """ env_devel = dotenv_values( env_devel_file, @@ -73,16 +76,23 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: env_devel[ "AIOCACHE_DISABLE" - ] = "1" # ensure that aio-caches are disabled for testing [https://aiocache.readthedocs.io/en/latest/testing.html] + # ensure that aio-caches are disabled for testing [https://aiocache.readthedocs.io/en/latest/testing.html] + ] = "1" env_devel[ "CATALOG_BACKGROUND_TASK_REST_TIME" - ] = "1" # ensure catalog refreshes services access rights fast + # ensure catalog refreshes services access rights fast + ] = "1" + # DIRECTOR ------------------- env_devel["DIRECTOR_REGISTRY_CACHING"] = "False" + # NOTE: this will make TracingSettings fail and therefore the default factory of every *_TRACING field will be set to None + env_devel["TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT"] = "null" + env_devel["TRACING_OPENTELEMETRY_COLLECTOR_PORT"] = "null" + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service + env_devel["DIRECTOR_DEFAULT_MAX_MEMORY"] = "268435456" + env_devel["DIRECTOR_DEFAULT_MAX_NANO_CPUS"] = "10000000" + env_devel.setdefault("DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS", "") - env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_SECRET_ID", "") - env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_SECRET_NAME", "") - env_devel.setdefault("DIRECTOR_SELF_SIGNED_SSL_FILENAME", "") env_devel["API_SERVER_DEV_FEATURES_ENABLED"] = "1" @@ -98,9 +108,9 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict: @pytest.fixture(scope="module") -def env_file_for_testing( +def env_file_for_docker_compose( temp_folder: Path, - testing_environ_vars: dict[str, str], + env_vars_for_docker_compose: dict[str, str], osparc_simcore_root_dir: Path, ) -> Iterator[Path]: """Dumps all the environment variables into an $(temp_folder)/.env.test file @@ -117,7 +127,7 @@ def env_file_for_testing( f"# 
Auto-generated from env_file_for_testing in {__file__}", file=fh, ) - for key, value in sorted(testing_environ_vars.items()): + for key, value in sorted(env_vars_for_docker_compose.items()): # NOTE: python-dotenv parses JSON encoded strings correctly, but # writing them back shows an issue. if the original ENV is something like MY_ENV='{"correct": "encodedjson"}' # it goes to MY_ENV={"incorrect": "encodedjson"}! @@ -146,7 +156,7 @@ def env_file_for_testing( def simcore_docker_compose( osparc_simcore_root_dir: Path, osparc_simcore_scripts_dir: Path, - env_file_for_testing: Path, + env_file_for_docker_compose: Path, temp_folder: Path, ) -> dict[str, Any]: """Resolves docker-compose for simcore stack in local host @@ -156,7 +166,7 @@ def simcore_docker_compose( COMPOSE_FILENAMES = ["docker-compose.yml", "docker-compose.local.yml"] # ensures .env at git_root_dir - assert env_file_for_testing.exists() + assert env_file_for_docker_compose.exists() # target docker compose path docker_compose_paths = [ @@ -171,7 +181,7 @@ def simcore_docker_compose( project_dir=osparc_simcore_root_dir / "services", scripts_dir=osparc_simcore_scripts_dir, docker_compose_paths=docker_compose_paths, - env_file_path=env_file_for_testing, + env_file_path=env_file_for_docker_compose, destination_path=temp_folder / "simcore_docker_compose.yml", ) @@ -180,7 +190,7 @@ def simcore_docker_compose( def ops_docker_compose( osparc_simcore_root_dir: Path, osparc_simcore_scripts_dir: Path, - env_file_for_testing: Path, + env_file_for_docker_compose: Path, temp_folder: Path, ) -> dict[str, Any]: """Filters only services in docker-compose-ops.yml and returns yaml data @@ -188,7 +198,7 @@ def ops_docker_compose( Produces same as `make .stack-ops.yml` in a temporary folder """ # ensures .env at git_root_dir, which will be used as current directory - assert env_file_for_testing.exists() + assert env_file_for_docker_compose.exists() # target docker compose path docker_compose_path = ( @@ -200,7 +210,7 @@ def 
ops_docker_compose( project_dir=osparc_simcore_root_dir / "services", scripts_dir=osparc_simcore_scripts_dir, docker_compose_paths=docker_compose_path, - env_file_path=env_file_for_testing, + env_file_path=env_file_for_docker_compose, destination_path=temp_folder / "ops_docker_compose.yml", ) diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index f2f83e4e022..dc496302e29 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -1,7 +1,8 @@ # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments import asyncio import logging diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index c295e328fc8..b6f221c7c80 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -59,9 +59,9 @@ def core_services_selection(simcore_docker_compose: dict) -> list[ServiceNameStr @pytest.fixture(scope="module") -def core_stack_namespace(testing_environ_vars: EnvVarsDict) -> str: +def core_stack_namespace(env_vars_for_docker_compose: EnvVarsDict) -> str: """returns 'com.docker.stack.namespace' service label core stack""" - stack_name = testing_environ_vars["SWARM_STACK_NAME"] + stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert stack_name is not None return stack_name @@ -144,7 +144,7 @@ def ops_services_selection(ops_docker_compose: ComposeSpec) -> list[ServiceNameS @pytest.fixture(scope="module") -def ops_stack_namespace(testing_environ_vars: EnvVarsDict) -> str: +def ops_stack_namespace(env_vars_for_docker_compose: EnvVarsDict) -> str: """returns 
'com.docker.stack.namespace' service label operations stack""" return "pytest-ops" From 6d3b23c11a17337f316d25be253620eee4a0ff9d Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 14:42:49 +0100 Subject: [PATCH 195/201] fixes fixture --- .../src/pytest_simcore/docker_swarm.py | 4 ++-- .../src/pytest_simcore/minio_service.py | 8 ++++---- .../src/pytest_simcore/postgres_service.py | 12 +++++++----- .../src/pytest_simcore/rabbit_service.py | 14 ++++++++------ .../src/pytest_simcore/redis_service.py | 9 +++++---- .../src/pytest_simcore/simcore_services.py | 4 ++-- .../src/pytest_simcore/simcore_storage_service.py | 9 ++++++--- .../pytest_simcore/simcore_webserver_service.py | 7 +++++-- .../src/pytest_simcore/traefik_service.py | 5 +++-- tests/public-api/conftest.py | 14 ++++++++------ 10 files changed, 50 insertions(+), 36 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index 657f84f9667..579d9b52bca 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -253,12 +253,12 @@ def docker_stack( core_docker_compose_file: Path, ops_docker_compose_file: Path, keep_docker_up: bool, - testing_environ_vars: EnvVarsDict, + env_vars_for_docker_compose: EnvVarsDict, ) -> Iterator[dict]: """deploys core and ops stacks and returns as soon as all are running""" # WARNING: keep prefix "pytest-" in stack names - core_stack_name = testing_environ_vars["SWARM_STACK_NAME"] + core_stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] ops_stack_name = "pytest-ops" assert core_stack_name diff --git a/packages/pytest-simcore/src/pytest_simcore/minio_service.py b/packages/pytest-simcore/src/pytest_simcore/minio_service.py index 46cee6fbeeb..ff7586a40de 100644 --- a/packages/pytest-simcore/src/pytest_simcore/minio_service.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/minio_service.py @@ -13,15 +13,15 @@ @pytest.fixture def minio_s3_settings( - docker_stack: dict, testing_environ_vars: dict, faker: Faker + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict, faker: Faker ) -> S3Settings: assert "pytest-ops_minio" in docker_stack["services"] return S3Settings( - S3_ACCESS_KEY=testing_environ_vars["S3_ACCESS_KEY"], - S3_SECRET_KEY=testing_environ_vars["S3_SECRET_KEY"], + S3_ACCESS_KEY=env_vars_for_docker_compose["S3_ACCESS_KEY"], + S3_SECRET_KEY=env_vars_for_docker_compose["S3_SECRET_KEY"], S3_ENDPOINT=f"http://{get_localhost_ip()}:{get_service_published_port('minio')}", - S3_BUCKET_NAME=testing_environ_vars["S3_BUCKET_NAME"], + S3_BUCKET_NAME=env_vars_for_docker_compose["S3_BUCKET_NAME"], S3_REGION="us-east-1", ) diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py index 24eddd0221f..c4df0b40c3b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py @@ -131,16 +131,18 @@ def database_from_template_before_each_function( @pytest.fixture(scope="module") -def postgres_dsn(docker_stack: dict, testing_environ_vars: dict) -> PostgresTestConfig: +def postgres_dsn( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> PostgresTestConfig: assert "pytest-simcore_postgres" in docker_stack["services"] pg_config: PostgresTestConfig = { - "user": testing_environ_vars["POSTGRES_USER"], - "password": testing_environ_vars["POSTGRES_PASSWORD"], - "database": testing_environ_vars["POSTGRES_DB"], + "user": env_vars_for_docker_compose["POSTGRES_USER"], + "password": env_vars_for_docker_compose["POSTGRES_PASSWORD"], + "database": env_vars_for_docker_compose["POSTGRES_DB"], "host": get_localhost_ip(), "port": get_service_published_port( - "postgres", testing_environ_vars["POSTGRES_PORT"] + "postgres", 
env_vars_for_docker_compose["POSTGRES_PORT"] ), } diff --git a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py index 47188400e79..938a2435283 100644 --- a/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/rabbit_service.py @@ -36,19 +36,21 @@ async def wait_till_rabbit_responsive(url: str) -> None: @pytest.fixture def rabbit_env_vars_dict( docker_stack: dict, - testing_environ_vars: dict, + env_vars_for_docker_compose: EnvVarsDict, ) -> EnvVarsDict: - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_rabbit" in docker_stack["services"] - port = get_service_published_port("rabbit", testing_environ_vars["RABBIT_PORT"]) + port = get_service_published_port( + "rabbit", env_vars_for_docker_compose["RABBIT_PORT"] + ) return { - "RABBIT_USER": testing_environ_vars["RABBIT_USER"], - "RABBIT_PASSWORD": testing_environ_vars["RABBIT_PASSWORD"], + "RABBIT_USER": env_vars_for_docker_compose["RABBIT_USER"], + "RABBIT_PASSWORD": env_vars_for_docker_compose["RABBIT_PASSWORD"], "RABBIT_HOST": get_localhost_ip(), "RABBIT_PORT": f"{port}", - "RABBIT_SECURE": testing_environ_vars["RABBIT_SECURE"], + "RABBIT_SECURE": env_vars_for_docker_compose["RABBIT_SECURE"], } diff --git a/packages/pytest-simcore/src/pytest_simcore/redis_service.py b/packages/pytest-simcore/src/pytest_simcore/redis_service.py index e9c16abcda5..98cf03a595b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/redis_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/redis_service.py @@ -19,6 +19,7 @@ from .helpers.docker import get_service_published_port from .helpers.host import get_localhost_ip +from .helpers.typing_env import EnvVarsDict log = logging.getLogger(__name__) @@ -26,21 +27,21 @@ @pytest.fixture async def redis_settings( docker_stack: dict, # stack is up - testing_environ_vars: 
dict, + env_vars_for_docker_compose: EnvVarsDict, ) -> RedisSettings: """Returns the settings of a redis service that is up and responsive""" - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_redis" in docker_stack["services"] port = get_service_published_port( - "simcore_redis", testing_environ_vars["REDIS_PORT"] + "simcore_redis", int(env_vars_for_docker_compose["REDIS_PORT"]) ) # test runner is running on the host computer settings = RedisSettings( REDIS_HOST=get_localhost_ip(), REDIS_PORT=PortInt(port), - REDIS_PASSWORD=testing_environ_vars["REDIS_PASSWORD"], + REDIS_PASSWORD=env_vars_for_docker_compose["REDIS_PASSWORD"], ) await wait_till_redis_responsive(settings.build_redis_dsn(RedisDatabase.RESOURCES)) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 1bdb143f418..11dd165a963 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -116,11 +116,11 @@ def create(cls, service_name: str, baseurl): def services_endpoint( core_services_selection: list[str], docker_stack: dict, - testing_environ_vars: EnvVarsDict, + env_vars_for_docker_compose: EnvVarsDict, ) -> dict[str, URL]: services_endpoint = {} - stack_name = testing_environ_vars["SWARM_STACK_NAME"] + stack_name = env_vars_for_docker_compose["SWARM_STACK_NAME"] for service in core_services_selection: service = _SERVICE_NAME_REPLACEMENTS.get(service, service) assert f"{stack_name}_{service}" in docker_stack["services"] diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py index 9628d1058c9..72431180f4a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py +++ 
b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_service.py @@ -17,14 +17,17 @@ from .helpers.docker import get_service_published_port from .helpers.host import get_localhost_ip +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") -def storage_endpoint(docker_stack: dict, testing_environ_vars: dict) -> Iterable[URL]: - prefix = testing_environ_vars["SWARM_STACK_NAME"] +def storage_endpoint( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> Iterable[URL]: + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_storage" in docker_stack["services"] - default_port = testing_environ_vars["STORAGE_ENDPOINT"].split(":")[1] + default_port = env_vars_for_docker_compose["STORAGE_ENDPOINT"].split(":")[1] endpoint = ( f"{get_localhost_ip()}:{get_service_published_port('storage', default_port)}" ) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py index 3d5d083edfe..b885b62232f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py @@ -10,11 +10,14 @@ from yarl import URL from .helpers.docker import get_service_published_port +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") -def webserver_endpoint(docker_stack: dict, testing_environ_vars: dict) -> URL: - prefix = testing_environ_vars["SWARM_STACK_NAME"] +def webserver_endpoint( + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict +) -> URL: + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_webserver" in docker_stack["services"] endpoint = f"127.0.0.1:{get_service_published_port('webserver', 8080)}" diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py index 462dfdb29e2..a75df5aae08 100644 --- 
a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py @@ -12,16 +12,17 @@ from yarl import URL from .helpers.docker import get_service_published_port +from .helpers.typing_env import EnvVarsDict @pytest.fixture(scope="module") def traefik_endpoints( - docker_stack: dict, testing_environ_vars: dict + docker_stack: dict, env_vars_for_docker_compose: EnvVarsDict ) -> tuple[URL, URL, URL]: """get the endpoint for the given simcore_service. NOTE: simcore_service defined as a parametrization """ - prefix = testing_environ_vars["SWARM_STACK_NAME"] + prefix = env_vars_for_docker_compose["SWARM_STACK_NAME"] assert f"{prefix}_traefik" in docker_stack["services"] traefik_api_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 8080)}" diff --git a/tests/public-api/conftest.py b/tests/public-api/conftest.py index 935d63a18a8..3b4a0b27b9c 100644 --- a/tests/public-api/conftest.py +++ b/tests/public-api/conftest.py @@ -46,12 +46,14 @@ @pytest.fixture(scope="session") -def testing_environ_vars(testing_environ_vars: EnvVarsDict) -> EnvVarsDict: - # OVERRIDES packages/pytest-simcore/src/pytest_simcore/docker_compose.py::testing_environ_vars fixture +def env_vars_for_docker_compose( + env_vars_for_docker_compose: EnvVarsDict, +) -> EnvVarsDict: + # OVERRIDES packages/pytest-simcore/src/pytest_simcore/docker_compose.py::env_vars_for_docker_compose fixture # help faster update of service_metadata table by catalog - testing_environ_vars["CATALOG_BACKGROUND_TASK_REST_TIME"] = "1" - return testing_environ_vars.copy() + env_vars_for_docker_compose["CATALOG_BACKGROUND_TASK_REST_TIME"] = "1" + return env_vars_for_docker_compose.copy() @pytest.fixture(scope="module") @@ -170,7 +172,7 @@ def registered_user( def services_registry( docker_registry_image_injector: Callable, registered_user: RegisteredUserDict, - testing_environ_vars: dict[str, str], + env_vars_for_docker_compose: dict[str, str], ) -> 
dict[ServiceNameStr, ServiceInfoDict]: # NOTE: service image MUST be injected in registry AFTER user is registered # @@ -249,7 +251,7 @@ def services_registry( } wait_for_catalog_to_detect = float( - testing_environ_vars["CATALOG_BACKGROUND_TASK_REST_TIME"] + env_vars_for_docker_compose["CATALOG_BACKGROUND_TASK_REST_TIME"] ) print( f"Catalog should take {wait_for_catalog_to_detect} secs to detect new services ...", From ac28f3fc8e7d17bda9c6264e02d3300f548d90be Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:01:06 +0100 Subject: [PATCH 196/201] disable tracing --- .../src/pytest_simcore/docker_compose.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 2da02b3ade9..17e0bf44a97 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -83,11 +83,18 @@ def env_vars_for_docker_compose(env_devel_file: Path) -> EnvVarsDict: # ensure catalog refreshes services access rights fast ] = "1" - # DIRECTOR ------------------- - env_devel["DIRECTOR_REGISTRY_CACHING"] = "False" - # NOTE: this will make TracingSettings fail and therefore the default factory of every *_TRACING field will be set to None + # TRACING + # NOTE: should go away with pydantic v2 env_devel["TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT"] = "null" env_devel["TRACING_OPENTELEMETRY_COLLECTOR_PORT"] = "null" + for key in env_devel: + if key.endswith("_TRACING"): + env_devel[key] = "null" + + # DIRECTOR + env_devel["DIRECTOR_REGISTRY_CACHING"] = "False" + # NOTE: this will make TracingSettings fail and therefore the default factory of every *_TRACING field will be set to None + # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service 
env_devel["DIRECTOR_DEFAULT_MAX_MEMORY"] = "268435456" env_devel["DIRECTOR_DEFAULT_MAX_NANO_CPUS"] = "10000000" From efb50659bae4b792d5d6ee8a986b6f1f62640efc Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:25:48 +0100 Subject: [PATCH 197/201] docker-path --- packages/pytest-simcore/src/pytest_simcore/docker_compose.py | 2 ++ .../settings-library/src/settings_library/docker_registry.py | 1 + services/director-v2/tests/integration/02/utils.py | 2 +- services/docker-compose.yml | 1 + 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py index 17e0bf44a97..737fa7bc35b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_compose.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_compose.py @@ -98,6 +98,8 @@ def env_vars_for_docker_compose(env_devel_file: Path) -> EnvVarsDict: # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service env_devel["DIRECTOR_DEFAULT_MAX_MEMORY"] = "268435456" env_devel["DIRECTOR_DEFAULT_MAX_NANO_CPUS"] = "10000000" + env_devel["DIRECTOR_LOGLEVEL"] = "DEBUG" + env_devel["REGISTRY_PATH"] = "127.0.0.1:5000" env_devel.setdefault("DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS", "") diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index bb365cb9785..08ca0818b7d 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -10,6 +10,7 @@ class RegistrySettings(BaseCustomSettings): REGISTRY_AUTH: bool = Field(..., description="do registry authentication") REGISTRY_PATH: str | None = Field( default=None, + # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine" 
description="development mode only, in case a local registry is used", ) # NOTE: name is missleading, http or https protocol are not included diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index 0c5f10c07bd..ff90ac59488 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -425,7 +425,7 @@ async def assert_all_services_running( ) ) - assert all(x == "running" for x in service_states) + assert all(state == "running" for state in service_states) print("--> all services are up and running!") diff --git a/services/docker-compose.yml b/services/docker-compose.yml index d962f1d0e65..e31261ca20c 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -268,6 +268,7 @@ services: POSTGRES_USER: ${POSTGRES_USER} REGISTRY_AUTH: ${REGISTRY_AUTH} + REGISTRY_PATH: ${REGISTRY_PATH} REGISTRY_PW: ${REGISTRY_PW} REGISTRY_SSL: ${REGISTRY_SSL} REGISTRY_URL: ${REGISTRY_URL} From 136b9beede3b58599f68cf8f1681a9d129e2e4f8 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:28:21 +0100 Subject: [PATCH 198/201] fixes tests --- ...ixed_dynamic_sidecar_and_legacy_project.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index dc496302e29..5bcf0449136 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -51,10 +51,7 @@ "storage", ] -pytest_simcore_ops_services_selection = [ - "adminer", - "minio", -] +pytest_simcore_ops_services_selection = ["adminer", "minio", "portainer"] @pytest.fixture() @@ 
-165,14 +162,14 @@ def _assemble_node_data(spec: dict, label: str) -> dict[str, str]: dy_static_file_server_service, "LEGACY", ), - uuid_dynamic_sidecar: _assemble_node_data( - dy_static_file_server_dynamic_sidecar_service, - "DYNAMIC", - ), - uuid_dynamic_sidecar_compose: _assemble_node_data( - dy_static_file_server_dynamic_sidecar_compose_spec_service, - "DYNAMIC_COMPOSE", - ), + # uuid_dynamic_sidecar: _assemble_node_data( + # dy_static_file_server_dynamic_sidecar_service, + # "DYNAMIC", + # ), + # uuid_dynamic_sidecar_compose: _assemble_node_data( + # dy_static_file_server_dynamic_sidecar_compose_spec_service, + # "DYNAMIC_COMPOSE", + # ), }, ) From 569d936fd585d205c346c8debb055883cd36493e Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:32:34 +0100 Subject: [PATCH 199/201] missing envvar --- .env-devel | 1 + 1 file changed, 1 insertion(+) diff --git a/.env-devel b/.env-devel index e6973be8ad6..54e11e3fe5b 100644 --- a/.env-devel +++ b/.env-devel @@ -192,6 +192,7 @@ REDIS_SECURE=false REDIS_USER=null REGISTRY_AUTH=True +REGISTRY_PATH="" REGISTRY_PW=adminadminadmin REGISTRY_SSL=True REGISTRY_URL=registry.osparc-master.speag.com From 3a8224e3bb1ea26b3d63042f5e8fe1ba3a3a8b7e Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:35:24 +0100 Subject: [PATCH 200/201] improves error --- .../pytest-simcore/src/pytest_simcore/environment_configs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index e8b34d4aa1b..0454335bf91 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -112,7 +112,8 @@ def _substitute(key, value) -> tuple[str, str]: return key, value except KeyError: 
pytest.fail( - f"{expected_env_var} is not defined in {env_devel_file} but used in docker-compose services[{service}].environment[{key}]" + f"{expected_env_var} is not defined in '{env_devel_file}' but it " + f"is used as a rhs variable in the docker-compose services[{service_name}].environment[{key}]" ) return key, value From b8a0ee50c67079d08af4b32646bb02722ab408f4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 18 Nov 2024 21:09:42 +0100 Subject: [PATCH 201/201] forgot uncomment --- ...t_mixed_dynamic_sidecar_and_legacy_project.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 5bcf0449136..4bfe998ad59 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -162,14 +162,14 @@ def _assemble_node_data(spec: dict, label: str) -> dict[str, str]: dy_static_file_server_service, "LEGACY", ), - # uuid_dynamic_sidecar: _assemble_node_data( - # dy_static_file_server_dynamic_sidecar_service, - # "DYNAMIC", - # ), - # uuid_dynamic_sidecar_compose: _assemble_node_data( - # dy_static_file_server_dynamic_sidecar_compose_spec_service, - # "DYNAMIC_COMPOSE", - # ), + uuid_dynamic_sidecar: _assemble_node_data( + dy_static_file_server_dynamic_sidecar_service, + "DYNAMIC", + ), + uuid_dynamic_sidecar_compose: _assemble_node_data( + dy_static_file_server_dynamic_sidecar_compose_spec_service, + "DYNAMIC_COMPOSE", + ), }, )