From e32787bc1b99c07668a189403e6672f3634138ab Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Mon, 11 Nov 2024 08:44:11 +0100
Subject: [PATCH 01/17] ✨Computational backend: persist cancellation request (#6694)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
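
Persist a user's cancellation request in the database instead of only in
the scheduler's memory. A new nullable "cancelled" timestamp column is
added to comp_runs (migration 8bfe65a5e294), CompRunsRepository gains a
mark_for_cancellation() update method, and ScheduledPipelineParams now
carries the timestamp instead of a transient boolean. On restart the
scheduler factory restores the mark from the DB, so a pipeline cancelled
right before a director-v2 reboot is still aborted (covered by the new
test_handling_cancellation_of_jobs_after_reboot).

A minimal sketch of the idea (ScheduledPipelineParams and the field
change mirror this patch; the standalone helper and fake DB value are
illustrative only):

    import datetime
    from dataclasses import dataclass

    @dataclass
    class ScheduledPipelineParams:
        cluster_id: int
        # was `mark_for_cancellation: bool = False`; a timestamp can be
        # persisted and restored, an in-memory bool cannot
        mark_for_cancellation: datetime.datetime | None

    def restore_after_reboot(
        db_row_cancelled: datetime.datetime | None,
    ) -> ScheduledPipelineParams:
        # the scheduler factory copies the persisted mark (r.cancelled)
        # instead of resetting it to False on every startup
        return ScheduledPipelineParams(
            cluster_id=0, mark_for_cancellation=db_row_cancelled
        )

    params = restore_after_reboot(datetime.datetime.now(tz=datetime.UTC))
    assert params.mark_for_cancellation is not None  # run will be aborted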
---
.../8bfe65a5e294_add_cancellation_mark.py | 29 +++++
.../models/comp_runs.py | 6 +
.../models/comp_runs.py | 32 ++++-
.../modules/comp_scheduler/_base_scheduler.py | 26 +++--
.../comp_scheduler/_scheduler_factory.py | 2 +-
.../modules/db/repositories/comp_runs.py | 13 ++-
services/director-v2/tests/conftest.py | 2 +-
.../tests/unit/with_dbs/conftest.py | 47 +++++++-
...t_modules_comp_scheduler_dask_scheduler.py | 110 ++++++++++++++++--
9 files changed, 240 insertions(+), 27 deletions(-)
create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py
new file mode 100644
index 00000000000..ecbe20b40e8
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py
@@ -0,0 +1,29 @@
+"""add cancellation mark
+
+Revision ID: 8bfe65a5e294
+Revises: 5ad02358751a
+Create Date: 2024-11-08 14:40:59.266181+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "8bfe65a5e294"
+down_revision = "5ad02358751a"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "comp_runs", sa.Column("cancelled", sa.DateTime(timezone=True), nullable=True)
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("comp_runs", "cancelled")
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
index e402a171562..eb84cefaa76 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py
@@ -99,6 +99,12 @@
nullable=True,
doc="When the run was finished",
),
+ sa.Column(
+ "cancelled",
+ sa.DateTime(timezone=True),
+ nullable=True,
+ doc="If filled, when cancellation was requested",
+ ),
sa.Column("metadata", JSONB, nullable=True, doc="the run optional metadata"),
sa.Column(
"use_on_demand_clusters",
diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
index 1d7800b9788..2af0646c3d3 100644
--- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
+++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py
@@ -46,6 +46,7 @@ class CompRunsAtDB(BaseModel):
modified: datetime.datetime
started: datetime.datetime | None
ended: datetime.datetime | None
+ cancelled: datetime.datetime | None
metadata: RunMetadataDict = RunMetadataDict()
use_on_demand_clusters: bool
@@ -72,7 +73,7 @@ def convert_null_to_default_cluster_id(cls, v):
@classmethod
def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None:
if v is not None and v.tzinfo is None:
- v = v.replace(tzinfo=datetime.timezone.utc)
+ v = v.replace(tzinfo=datetime.UTC)
return v
@validator("metadata", pre=True)
@@ -93,9 +94,22 @@ class Config:
"user_id": 132,
"cluster_id": 0,
"iteration": 42,
+ "result": "UNKNOWN",
+ "created": "2021-03-01 13:07:34.19161",
+ "modified": "2021-03-01 13:07:34.19161",
+ "cancelled": None,
+ "use_on_demand_clusters": False,
+ },
+ {
+ "run_id": 432,
+ "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5",
+ "user_id": 132,
+ "cluster_id": None, # this default to DEFAULT_CLUSTER_ID
+ "iteration": 42,
"result": "NOT_STARTED",
"created": "2021-03-01 13:07:34.19161",
"modified": "2021-03-01 13:07:34.19161",
+ "cancelled": None,
"use_on_demand_clusters": False,
},
{
@@ -109,6 +123,7 @@ class Config:
"modified": "2021-03-01 13:07:34.19161",
"started": "2021-03-01 8:07:34.19161",
"ended": "2021-03-01 13:07:34.10",
+ "cancelled": None,
"metadata": {
"node_id_names_map": {},
"product_name": "osparc",
@@ -118,5 +133,20 @@ class Config:
},
"use_on_demand_clusters": False,
},
+ {
+ "run_id": 43243,
+ "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5",
+ "user_id": 132,
+ "cluster_id": 123,
+ "iteration": 12,
+ "result": "SUCCESS",
+ "created": "2021-03-01 13:07:34.19161",
+ "modified": "2021-03-01 13:07:34.19161",
+ "started": "2021-03-01 8:07:34.19161",
+ "ended": "2021-03-01 13:07:34.10",
+ "cancelled": None,
+ "metadata": None,
+ "use_on_demand_clusters": False,
+ },
]
}
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
index 08396686e43..cae539596d4 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
@@ -47,7 +47,7 @@
)
from ...core.settings import ComputationalBackendSettings
from ...models.comp_pipelines import CompPipelineAtDB
-from ...models.comp_runs import CompRunsAtDB, RunMetadataDict
+from ...models.comp_runs import RunMetadataDict
from ...models.comp_tasks import CompTaskAtDB
from ...utils.comp_scheduler import (
COMPLETED_STATES,
@@ -131,7 +131,7 @@ async def _triage_changed_tasks(
class ScheduledPipelineParams:
cluster_id: ClusterID
run_metadata: RunMetadataDict
- mark_for_cancellation: bool = False
+ mark_for_cancellation: datetime.datetime | None
use_on_demand_clusters: bool
@@ -169,7 +169,7 @@ async def run_new_pipeline(
return
runs_repo = CompRunsRepository.instance(self.db_engine)
- new_run: CompRunsAtDB = await runs_repo.create(
+ new_run = await runs_repo.create(
user_id=user_id,
project_id=project_id,
cluster_id=cluster_id,
@@ -182,6 +182,7 @@ async def run_new_pipeline(
cluster_id=cluster_id,
run_metadata=new_run.metadata,
use_on_demand_clusters=use_on_demand_clusters,
+ mark_for_cancellation=None,
)
await publish_project_log(
self.rabbitmq_client,
@@ -212,11 +213,18 @@ async def stop_pipeline(
selected_iteration = iteration
# mark the scheduled pipeline for stopping
- self.scheduled_pipelines[
- (user_id, project_id, selected_iteration)
- ].mark_for_cancellation = True
- # ensure the scheduler starts right away
- self._wake_up_scheduler_now()
+ updated_comp_run = await CompRunsRepository.instance(
+ self.db_engine
+ ).mark_for_cancellation(
+ user_id=user_id, project_id=project_id, iteration=selected_iteration
+ )
+ if updated_comp_run:
+ assert updated_comp_run.cancelled is not None # nosec
+ self.scheduled_pipelines[
+ (user_id, project_id, selected_iteration)
+ ].mark_for_cancellation = updated_comp_run.cancelled
+ # ensure the scheduler starts right away
+ self._wake_up_scheduler_now()
async def schedule_all_pipelines(self) -> None:
self.wake_up_event.clear()
@@ -343,7 +351,7 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool:
if task.last_heartbeat is None:
assert task.start # nosec
return bool(
- (utc_now - task.start.replace(tzinfo=datetime.timezone.utc))
+ (utc_now - task.start.replace(tzinfo=datetime.UTC))
> self.service_runtime_heartbeat_interval
)
return bool(
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
index 458950e9798..f8b648eaf48 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
@@ -47,7 +47,7 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler:
r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID
),
run_metadata=r.metadata,
- mark_for_cancellation=False,
+ mark_for_cancellation=r.cancelled,
use_on_demand_clusters=r.use_on_demand_clusters,
)
for r in runs
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
index 4f9a8e42b53..955b9dd5858 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py
@@ -3,6 +3,7 @@
from collections import deque
from typing import Any
+import arrow
import sqlalchemy as sa
from aiopg.sa.result import RowProxy
from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID
@@ -146,10 +147,20 @@ async def set_run_result(
) -> CompRunsAtDB | None:
values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]}
if final_state:
- values.update({"ended": datetime.datetime.now(tz=datetime.UTC)})
+ values.update({"ended": arrow.utcnow().datetime})
return await self.update(
user_id,
project_id,
iteration,
**values,
)
+
+ async def mark_for_cancellation(
+ self, *, user_id: UserID, project_id: ProjectID, iteration: PositiveInt
+ ) -> CompRunsAtDB | None:
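+ # persist the cancellation request; the scheduler factory restores it after a restart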
+ return await self.update(
+ user_id,
+ project_id,
+ iteration,
+ cancelled=arrow.utcnow().datetime,
+ )
diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py
index 4e415254486..63abe3d0984 100644
--- a/services/director-v2/tests/conftest.py
+++ b/services/director-v2/tests/conftest.py
@@ -218,7 +218,7 @@ async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]:
@pytest.fixture()
async def async_client(initialized_app: FastAPI) -> AsyncIterable[httpx.AsyncClient]:
async with httpx.AsyncClient(
- app=initialized_app,
+ transport=httpx.ASGITransport(app=initialized_app),
base_url="http://director-v2.testserver.io",
headers={"Content-Type": "application/json"},
) as client:
diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py
index 8dd5527f00a..516730d4e14 100644
--- a/services/director-v2/tests/unit/with_dbs/conftest.py
+++ b/services/director-v2/tests/unit/with_dbs/conftest.py
@@ -11,6 +11,7 @@
from typing import Any, cast
from uuid import uuid4
+import arrow
import pytest
import sqlalchemy as sa
from _helpers import PublishedProject, RunningProject
@@ -318,6 +319,7 @@ async def running_project(
) -> RunningProject:
user = registered_user()
created_project = await project(user, workbench=fake_workbench_without_outputs)
+ now_time = arrow.utcnow().datetime
return RunningProject(
project=created_project,
pipeline=pipeline(
@@ -329,9 +331,50 @@ async def running_project(
project=created_project,
state=StateType.RUNNING,
progress=0.0,
- start=datetime.datetime.now(tz=datetime.UTC),
+ start=now_time,
+ ),
+ runs=runs(
+ user=user,
+ project=created_project,
+ started=now_time,
+ result=StateType.RUNNING,
+ ),
+ )
+
+
+@pytest.fixture
+async def running_project_mark_for_cancellation(
+ registered_user: Callable[..., dict[str, Any]],
+ project: Callable[..., Awaitable[ProjectAtDB]],
+ pipeline: Callable[..., CompPipelineAtDB],
+ tasks: Callable[..., list[CompTaskAtDB]],
+ runs: Callable[..., CompRunsAtDB],
+ fake_workbench_without_outputs: dict[str, Any],
+ fake_workbench_adjacency: dict[str, Any],
+) -> RunningProject:
+ user = registered_user()
+ created_project = await project(user, workbench=fake_workbench_without_outputs)
+ now_time = arrow.utcnow().datetime
+ return RunningProject(
+ project=created_project,
+ pipeline=pipeline(
+ project_id=f"{created_project.uuid}",
+ dag_adjacency_list=fake_workbench_adjacency,
+ ),
+ tasks=tasks(
+ user=user,
+ project=created_project,
+ state=StateType.RUNNING,
+ progress=0.0,
+ start=now_time,
+ ),
+ runs=runs(
+ user=user,
+ project=created_project,
+ result=StateType.RUNNING,
+ started=now_time,
+ cancelled=now_time + datetime.timedelta(seconds=5),
),
- runs=runs(user=user, project=created_project, result=StateType.RUNNING),
)
diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
index fbc90204f83..f9e5ff33c4b 100644
--- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
+++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
@@ -103,9 +103,9 @@ def _assert_dask_client_correctly_initialized(
)
mocked_dask_client.register_handlers.assert_called_once_with(
TaskHandlers(
- cast(
+ cast( # noqa: SLF001
DaskScheduler, scheduler
- )._task_progress_change_handler, # noqa: SLF001
+ )._task_progress_change_handler,
cast(DaskScheduler, scheduler)._task_log_change_handler, # noqa: SLF001
)
)
@@ -280,9 +280,10 @@ def test_scheduler_raises_exception_for_missing_dependencies(
settings = AppSettings.create_from_envs()
app = init_app(settings)
- with pytest.raises(ConfigurationError):
- with TestClient(app, raise_server_exceptions=True) as _:
- pass
+ with pytest.raises(ConfigurationError), TestClient(
+ app, raise_server_exceptions=True
+ ) as _:
+ pass
async def test_empty_pipeline_is_not_scheduled(
@@ -367,7 +368,7 @@ async def test_misconfigured_pipeline_is_not_scheduled(
assert u_id == user["id"]
assert p_id == sleepers_project.uuid
assert it > 0
- assert params.mark_for_cancellation is False
+ assert params.mark_for_cancellation is None
# check the database was properly updated
async with aiopg_engine.acquire() as conn:
result = await conn.execute(
@@ -418,7 +419,7 @@ async def _assert_start_pipeline(
assert u_id == published_project.project.prj_owner
assert p_id == published_project.project.uuid
assert it > 0
- assert params.mark_for_cancellation is False
+ assert params.mark_for_cancellation is None
assert params.run_metadata == run_metadata
# check the database is correctly updated, the run is published
@@ -1029,11 +1030,9 @@ async def test_task_progress_triggers(
parent_project_id=None,
),
)
- await cast(
+ await cast( # noqa: SLF001
DaskScheduler, scheduler
- )._task_progress_change_handler( # noqa: SLF001
- progress_event.json()
- )
+ )._task_progress_change_handler(progress_event.json())
# NOTE: not sure whether it should switch to STARTED.. it would make sense
await _assert_comp_tasks_db(
aiopg_engine,
@@ -1207,7 +1206,7 @@ async def test_handling_scheduling_after_reboot(
mocked_clean_task_output_fct: mock.MagicMock,
reboot_state: RebootState,
):
- """After the dask client is rebooted, or that the director-v2 reboots the scheduler
+ """After the dask client is rebooted, or that the director-v2 reboots the dv-2 internal scheduler
shall continue scheduling correctly. Even though the task might have continued to run
in the dask-scheduler."""
@@ -1279,6 +1278,93 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData:
)
+async def test_handling_cancellation_of_jobs_after_reboot(
+ with_disabled_scheduler_task: None,
+ mocked_dask_client: mock.MagicMock,
+ aiopg_engine: aiopg.sa.engine.Engine,
+ running_project_mark_for_cancellation: RunningProject,
+ scheduler: BaseCompScheduler,
+ mocked_parse_output_data_fct: mock.MagicMock,
+ mocked_clean_task_output_fct: mock.MagicMock,
+):
+ """A running pipeline was cancelled by a user and the DV-2 was restarted BEFORE
+ It could actually cancel the task. On reboot the DV-2 shall recover
+ and actually cancel the pipeline properly"""
+
+ # check initial status
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED
+ )
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [t.node_id for t in running_project_mark_for_cancellation.tasks],
+ expected_state=RunningState.STARTED,
+ expected_progress=0,
+ )
+
+ # the backend shall report the tasks as running
+ async def mocked_get_tasks_status(job_ids: list[str]) -> list[DaskClientTaskState]:
+ return [DaskClientTaskState.PENDING_OR_STARTED for _ in job_ids]
+
+ mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status
+ # running the scheduler should now actually cancel the run
+ await run_comp_scheduler(scheduler)
+ mocked_dask_client.abort_computation_task.assert_called()
+ assert mocked_dask_client.abort_computation_task.call_count == len(
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ]
+ )
+ # in the DB they are still running; they will be stopped in the next iteration
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ],
+ expected_state=RunningState.STARTED,
+ expected_progress=0,
+ )
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED
+ )
+
+ # the backend shall now report the tasks as aborted
+ async def mocked_get_tasks_status_aborted(
+ job_ids: list[str],
+ ) -> list[DaskClientTaskState]:
+ return [DaskClientTaskState.ABORTED for _ in job_ids]
+
+ mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status_aborted
+
+ async def _raise_task_cancelled(_job_id: str) -> TaskOutputData:
+ raise TaskCancelledError
+
+ mocked_dask_client.get_task_result.side_effect = _raise_task_cancelled
+ await run_comp_scheduler(scheduler)
+ # now should be stopped
+ await _assert_comp_tasks_db(
+ aiopg_engine,
+ running_project_mark_for_cancellation.project.uuid,
+ [
+ t.node_id
+ for t in running_project_mark_for_cancellation.tasks
+ if t.node_class == NodeClass.COMPUTATIONAL
+ ],
+ expected_state=RunningState.ABORTED,
+ expected_progress=1,
+ )
+ await _assert_comp_run_db(
+ aiopg_engine, running_project_mark_for_cancellation, RunningState.ABORTED
+ )
+ mocked_clean_task_output_fct.assert_called()
+
+
@pytest.fixture
def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int:
seconds = 1
From ce9d3a590704ee8a5b4ab01dddd9bd59db1f6e0f Mon Sep 17 00:00:00 2001
From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Mon, 11 Nov 2024 12:41:58 +0100
Subject: [PATCH 02/17] ♻️ refactor project listing DB function (🚨 We no longer list projects that do not have a product assigned) (#6692)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
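
Merge list_projects_full_search() into a single list_projects() driven by
two explicit scope models: WorkspaceQuery (PRIVATE/SHARED/ALL) and
FolderQuery (ROOT/SPECIFIC/ALL). The free-text `search` argument becomes
`filter_by_text`, tag filtering moves into `filter_tag_ids_list`, and the
private- and shared-workspace SELECTs are combined with a UNION ALL
before counting and pagination.

🚨 Breaking change: projects without a row in projects_to_products are no
longer listed (the old `product_name IS NULL` backward-compatibility
branch is removed); see the updated
test_list_projects_with_innaccessible_services.

Sketch of the new scope validation (FolderScope/FolderQuery match this
patch but are re-declared standalone here with a plain str Enum and the
pydantic v1 API; the demo calls are illustrative):

    from enum import Enum

    from pydantic import BaseModel, validator  # pydantic v1 API

    class FolderScope(str, Enum):
        ROOT = "ROOT"
        SPECIFIC = "SPECIFIC"
        ALL = "ALL"

    class FolderQuery(BaseModel):
        folder_scope: FolderScope
        folder_id: int | None = None

        @validator("folder_id", pre=True, always=True)
        @classmethod
        def _id_must_match_scope(cls, value, values):
            # folder_id is mandatory for SPECIFIC and forbidden otherwise
            scope = values.get("folder_scope")
            if scope == FolderScope.SPECIFIC and value is None:
                msg = "folder_id must be provided when folder_scope is SPECIFIC."
                raise ValueError(msg)
            if scope != FolderScope.SPECIFIC and value is not None:
                msg = "folder_id should be None when folder_scope is not SPECIFIC."
                raise ValueError(msg)
            return value

    FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=42)  # ok
    FolderQuery(folder_scope=FolderScope.ROOT)  # ok, folder_id stays None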
---
.../src/models_library/folders.py | 34 +-
.../src/models_library/workspaces.py | 34 +-
.../projects/_crud_api_read.py | 57 +--
.../simcore_service_webserver/projects/db.py | 454 ++++++++----------
.../02/test_projects_crud_handlers.py | 9 +-
5 files changed, 291 insertions(+), 297 deletions(-)
diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py
index 4d73618750c..485e74b86c8 100644
--- a/packages/models-library/src/models_library/folders.py
+++ b/packages/models-library/src/models_library/folders.py
@@ -1,13 +1,41 @@
from datetime import datetime
+from enum import auto
from typing import TypeAlias
-from models_library.users import GroupID, UserID
-from models_library.workspaces import WorkspaceID
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, Field, PositiveInt, validator
+
+from .users import GroupID, UserID
+from .utils.enums import StrAutoEnum
+from .workspaces import WorkspaceID
FolderID: TypeAlias = PositiveInt
+class FolderScope(StrAutoEnum):
+ ROOT = auto()
+ SPECIFIC = auto()
+ ALL = auto()
+
+
+class FolderQuery(BaseModel):
+ folder_scope: FolderScope
+ folder_id: PositiveInt | None = None
+
+ @validator("folder_id", pre=True, always=True)
+ @classmethod
+ def validate_folder_id(cls, value, values):
+ scope = values.get("folder_scope")
+ if scope == FolderScope.SPECIFIC and value is None:
+ raise ValueError(
+ "folder_id must be provided when folder_scope is SPECIFIC."
+ )
+ if scope != FolderScope.SPECIFIC and value is not None:
+ raise ValueError(
+ "folder_id should be None when folder_scope is not SPECIFIC."
+ )
+ return value
+
+
#
# DB
#
diff --git a/packages/models-library/src/models_library/workspaces.py b/packages/models-library/src/models_library/workspaces.py
index c08e02501cb..e5b816623fe 100644
--- a/packages/models-library/src/models_library/workspaces.py
+++ b/packages/models-library/src/models_library/workspaces.py
@@ -1,13 +1,41 @@
from datetime import datetime
+from enum import auto
from typing import TypeAlias
-from models_library.access_rights import AccessRights
-from models_library.users import GroupID
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import BaseModel, Field, PositiveInt, validator
+
+from .access_rights import AccessRights
+from .users import GroupID
+from .utils.enums import StrAutoEnum
WorkspaceID: TypeAlias = PositiveInt
+class WorkspaceScope(StrAutoEnum):
+ PRIVATE = auto()
+ SHARED = auto()
+ ALL = auto()
+
+
+class WorkspaceQuery(BaseModel):
+ workspace_scope: WorkspaceScope
+ workspace_id: PositiveInt | None = None
+
+ @validator("workspace_id", pre=True, always=True)
+ @classmethod
+ def validate_workspace_id(cls, value, values):
+ scope = values.get("workspace_scope")
+ if scope == WorkspaceScope.SHARED and value is None:
+ raise ValueError(
+ "workspace_id must be provided when workspace_scope is SHARED."
+ )
+ if scope != WorkspaceScope.SHARED and value is not None:
+ raise ValueError(
+ "workspace_id should be None when workspace_scope is not SHARED."
+ )
+ return value
+
+
#
# DB
#
diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
index f8b6aee4ff9..4d4352d5229 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
@@ -6,16 +6,16 @@
"""
from aiohttp import web
-from models_library.access_rights import AccessRights
from models_library.api_schemas_webserver._base import OutputSchema
from models_library.api_schemas_webserver.projects import ProjectListItem
-from models_library.folders import FolderID
+from models_library.folders import FolderID, FolderQuery, FolderScope
from models_library.projects import ProjectID
from models_library.rest_ordering import OrderBy
-from models_library.users import GroupID, UserID
-from models_library.workspaces import WorkspaceID
+from models_library.users import UserID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
from pydantic import NonNegativeInt
from servicelib.utils import logged_gather
+from simcore_postgres_database.models.projects import ProjectType
from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB
from simcore_service_webserver.workspaces._workspaces_api import (
check_user_workspace_access,
@@ -23,7 +23,6 @@
from ..catalog.client import get_services_for_user_in_product
from ..folders import _folders_db as folders_db
-from ..workspaces import _workspaces_db as workspaces_db
from . import projects_api
from ._permalink_api import update_or_pop_permalink_in_project
from .db import ProjectDBAPI
@@ -36,7 +35,6 @@ async def _append_fields(
user_id: UserID,
project: ProjectDict,
is_template: bool,
- workspace_access_rights: dict[GroupID, AccessRights] | None,
model_schema_cls: type[OutputSchema],
):
# state
@@ -50,12 +48,6 @@ async def _append_fields(
# permalink
await update_or_pop_permalink_in_project(request, project)
- # replace project access rights (if project is in workspace)
- if workspace_access_rights:
- project["accessRights"] = {
- gid: access.dict() for gid, access in workspace_access_rights.items()
- }
-
# validate
return model_schema_cls.parse_obj(project).data(exclude_unset=True)
@@ -110,15 +102,25 @@ async def list_projects( # pylint: disable=too-many-arguments
db_projects, db_project_types, total_number_projects = await db.list_projects(
product_name=product_name,
user_id=user_id,
- workspace_id=workspace_id,
- folder_id=folder_id,
+ workspace_query=(
+ WorkspaceQuery(
+ workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id
+ )
+ if workspace_id
+ else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)
+ ),
+ folder_query=(
+ FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id)
+ if folder_id
+ else FolderQuery(folder_scope=FolderScope.ROOT)
+ ),
# attrs
filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type),
filter_by_services=user_available_services,
filter_trashed=trashed,
filter_hidden=show_hidden,
# composed attrs
- search=search,
+ filter_by_text=search,
# pagination
offset=offset,
limit=limit,
@@ -126,14 +128,6 @@ async def list_projects( # pylint: disable=too-many-arguments
order_by=order_by,
)
- # If workspace, override project access rights
- workspace_access_rights = None
- if workspace_id:
- workspace_db = await workspaces_db.get_workspace_for_user(
- app, user_id=user_id, workspace_id=workspace_id, product_name=product_name
- )
- workspace_access_rights = workspace_db.access_rights
-
projects: list[ProjectDict] = await logged_gather(
*(
_append_fields(
@@ -141,7 +135,6 @@ async def list_projects( # pylint: disable=too-many-arguments
user_id=user_id,
project=prj,
is_template=prj_type == ProjectTypeDB.TEMPLATE,
- workspace_access_rights=workspace_access_rights,
model_schema_cls=ProjectListItem,
)
for prj, prj_type in zip(db_projects, db_project_types)
@@ -170,19 +163,18 @@ async def list_projects_full_search(
request.app, user_id, product_name, only_key_versions=True
)
- (
- db_projects,
- db_project_types,
- total_number_projects,
- ) = await db.list_projects_full_search(
- user_id=user_id,
+ (db_projects, db_project_types, total_number_projects) = await db.list_projects(
product_name=product_name,
+ user_id=user_id,
+ workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
+ folder_query=FolderQuery(folder_scope=FolderScope.ALL),
filter_by_services=user_available_services,
- text=text,
+ filter_by_text=text,
+ filter_tag_ids_list=tag_ids_list,
+ filter_by_project_type=ProjectType.STANDARD,
offset=offset,
limit=limit,
order_by=order_by,
- tag_ids_list=tag_ids_list,
)
projects: list[ProjectDict] = await logged_gather(
@@ -192,7 +184,6 @@ async def list_projects_full_search(
user_id=user_id,
project=prj,
is_template=prj_type == ProjectTypeDB.TEMPLATE,
- workspace_access_rights=None,
model_schema_cls=ProjectListItem,
)
for prj, prj_type in zip(db_projects, db_project_types)
diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py
index 5e0c216f77e..2281b807a71 100644
--- a/services/web/server/src/simcore_service_webserver/projects/db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/db.py
@@ -16,7 +16,7 @@
from aiopg.sa.connection import SAConnection
from aiopg.sa.result import ResultProxy, RowProxy
from models_library.basic_types import IDStr
-from models_library.folders import FolderID
+from models_library.folders import FolderQuery, FolderScope
from models_library.products import ProductName
from models_library.projects import ProjectID, ProjectIDStr
from models_library.projects_comments import CommentID, ProjectsCommentsDB
@@ -31,7 +31,7 @@
from models_library.users import UserID
from models_library.utils.fastapi_encoders import jsonable_encoder
from models_library.wallets import WalletDB, WalletID
-from models_library.workspaces import WorkspaceID
+from models_library.workspaces import WorkspaceQuery, WorkspaceScope
from pydantic import parse_obj_as
from pydantic.types import PositiveInt
from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY
@@ -59,7 +59,7 @@
from sqlalchemy import func, literal_column
from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
from sqlalchemy.dialects.postgresql import insert as pg_insert
-from sqlalchemy.sql import and_
+from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, and_
from tenacity import TryAgain
from tenacity.asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
@@ -350,21 +350,22 @@ async def upsert_project_linked_product(
).group_by(project_to_groups.c.project_uuid)
).subquery("access_rights_subquery")
- async def list_projects( # pylint: disable=too-many-arguments
+ async def list_projects( # pylint: disable=too-many-arguments,too-many-statements,too-many-branches
self,
*,
- # hierarchy filters
- product_name: str,
+ product_name: ProductName,
user_id: PositiveInt,
- workspace_id: WorkspaceID | None,
- folder_id: FolderID | None = None,
+ # hierarchy filters
+ workspace_query: WorkspaceQuery,
+ folder_query: FolderQuery,
# attribute filters
- search: str | None = None,
filter_by_project_type: ProjectType | None = None,
filter_by_services: list[dict] | None = None,
filter_published: bool | None = False,
filter_hidden: bool | None = False,
filter_trashed: bool | None = False,
+ filter_by_text: str | None = None,
+ filter_tag_ids_list: list[int] | None = None,
# pagination
offset: int | None = 0,
limit: int | None = None,
@@ -373,156 +374,9 @@ async def list_projects( # pylint: disable=too-many-arguments
field=IDStr("last_change_date"), direction=OrderDirection.DESC
),
) -> tuple[list[dict[str, Any]], list[ProjectType], int]:
- """
- If workspace_id is provided, then listing in workspace is considered/preffered
- """
- assert (
- order_by.field in projects.columns
- ), "Guaranteed by ProjectListWithJsonStrParams" # nosec
+ if filter_tag_ids_list is None:
+ filter_tag_ids_list = []
- # helper
- private_workspace_user_id_or_none: UserID | None = (
- None if workspace_id else user_id
- )
-
- async with self.engine.acquire() as conn:
-
- _join_query = (
- projects.join(projects_to_products, isouter=True)
- .join(self.access_rights_subquery, isouter=True)
- .join(
- projects_to_folders,
- (
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (
- projects_to_folders.c.user_id
- == private_workspace_user_id_or_none
- )
- ),
- isouter=True,
- )
- )
-
- query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- self.access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- )
- .select_from(_join_query)
- .where(
- (
- (projects_to_products.c.product_name == product_name)
- # This was added for backward compatibility, including old projects not in the projects_to_products table.
- | (projects_to_products.c.product_name.is_(None))
- )
- & (
- projects_to_folders.c.folder_id == folder_id
- if folder_id
- else projects_to_folders.c.folder_id.is_(None)
- )
- & (
- projects.c.workspace_id == workspace_id # <-- Shared workspace
- if workspace_id
- else projects.c.workspace_id.is_(None) # <-- Private workspace
- )
- )
- )
-
- # attributes filters
- # None, true, false = all, attribute, !attribute
- attributes_filters = []
- if filter_by_project_type is not None:
- attributes_filters.append(
- projects.c.type == filter_by_project_type.value
- )
-
- if filter_hidden is not None:
- attributes_filters.append(projects.c.hidden.is_(filter_hidden))
-
- if filter_published is not None:
- attributes_filters.append(projects.c.published.is_(filter_published))
-
- if filter_trashed is not None:
- attributes_filters.append(
- # marked explicitly as trashed
- (
- projects.c.trashed_at.is_not(None)
- & projects.c.trashed_explicitly.is_(True)
- )
- if filter_trashed
- # not marked as trashed
- else projects.c.trashed_at.is_(None)
- )
- query = query.where(sa.and_(*attributes_filters))
-
- if private_workspace_user_id_or_none:
- # If Private workspace we check to which projects user has access
- user_groups: list[RowProxy] = await self._list_user_groups(
- conn, user_id
- )
- query = query.where(
- (projects.c.prj_owner == user_id)
- | sa.text(
- f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
- )
- )
-
- if search:
- query = query.join(
- users, users.c.id == projects.c.prj_owner, isouter=True
- )
- query = query.where(
- (projects.c.name.ilike(f"%{search}%"))
- | (projects.c.description.ilike(f"%{search}%"))
- | (projects.c.uuid.ilike(f"%{search}%"))
- | (users.c.name.ilike(f"%{search}%"))
- )
-
- if order_by.direction == OrderDirection.ASC:
- query = query.order_by(sa.asc(getattr(projects.c, order_by.field)))
- else:
- query = query.order_by(sa.desc(getattr(projects.c, order_by.field)))
-
- # page meta
- total_number_of_projects = await conn.scalar(
- query.with_only_columns(func.count()).order_by(None)
- )
- assert total_number_of_projects is not None # nosec
-
- # page data
- prjs, prj_types = await self._execute_without_permission_check(
- conn,
- user_id=user_id,
- select_projects_query=query.offset(offset).limit(limit),
- filter_by_services=filter_by_services,
- )
-
- return (
- prjs,
- prj_types,
- total_number_of_projects,
- )
-
- async def list_projects_full_search(
- self,
- *,
- user_id: PositiveInt,
- product_name: ProductName,
- filter_by_services: list[dict] | None = None,
- text: str | None = None,
- offset: int | None = 0,
- limit: int | None = None,
- tag_ids_list: list[int],
- order_by: OrderBy = OrderBy(
- field=IDStr("last_change_date"), direction=OrderDirection.DESC
- ),
- ) -> tuple[list[dict[str, Any]], list[ProjectType], int]:
async with self.engine.acquire() as conn:
user_groups: list[RowProxy] = await self._list_user_groups(conn, user_id)
@@ -552,124 +406,212 @@ async def list_projects_full_search(
).group_by(projects_tags.c.project_id)
).subquery("project_tags_subquery")
- private_workspace_query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- self.access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).label("tags"),
+ ###
+ # Private workspace query
+ ###
+
+ if workspace_query.workspace_scope is not WorkspaceScope.SHARED:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.PRIVATE,
+ WorkspaceScope.ALL,
)
- .select_from(
- projects.join(self.access_rights_subquery, isouter=True)
- .join(projects_to_products)
- .join(
- projects_to_folders,
+
+ private_workspace_query = (
+ sa.select(
+ *[
+ col
+ for col in projects.columns
+ if col.name not in ["access_rights"]
+ ],
+ self.access_rights_subquery.c.access_rights,
+ projects_to_products.c.product_name,
+ projects_to_folders.c.folder_id,
+ sa.func.coalesce(
+ project_tags_subquery.c.tags,
+ sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
+ ).label("tags"),
+ )
+ .select_from(
+ projects.join(self.access_rights_subquery, isouter=True)
+ .join(projects_to_products)
+ .join(
+ projects_to_folders,
+ (
+ (projects_to_folders.c.project_uuid == projects.c.uuid)
+ & (projects_to_folders.c.user_id == user_id)
+ ),
+ isouter=True,
+ )
+ .join(project_tags_subquery, isouter=True)
+ )
+ .where(
(
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (projects_to_folders.c.user_id == user_id)
- ),
- isouter=True,
+ (projects.c.prj_owner == user_id)
+ | sa.text(
+ f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ )
+ )
+ & (projects.c.workspace_id.is_(None)) # <-- Private workspace
+ & (projects_to_products.c.product_name == product_name)
)
- .join(project_tags_subquery, isouter=True)
)
- .where(
- (
- (projects.c.prj_owner == user_id)
- | sa.text(
- f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ if filter_by_text is not None:
+ private_workspace_query = private_workspace_query.join(
+ users, users.c.id == projects.c.prj_owner, isouter=True
+ )
+ else:
+ private_workspace_query = None
+
+ ###
+ # Shared workspace query
+ ###
+
+ if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE:
+ assert workspace_query.workspace_scope in (
+ WorkspaceScope.SHARED,
+ WorkspaceScope.ALL,
+ ) # nosec
+
+ shared_workspace_query = (
+ sa.select(
+ *[
+ col
+ for col in projects.columns
+ if col.name not in ["access_rights"]
+ ],
+ workspace_access_rights_subquery.c.access_rights,
+ projects_to_products.c.product_name,
+ projects_to_folders.c.folder_id,
+ sa.func.coalesce(
+ project_tags_subquery.c.tags,
+ sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
+ ).label("tags"),
+ )
+ .select_from(
+ projects.join(
+ workspace_access_rights_subquery,
+ projects.c.workspace_id
+ == workspace_access_rights_subquery.c.workspace_id,
)
+ .join(projects_to_products)
+ .join(
+ projects_to_folders,
+ (
+ (projects_to_folders.c.project_uuid == projects.c.uuid)
+ & (projects_to_folders.c.user_id.is_(None))
+ ),
+ isouter=True,
+ )
+ .join(project_tags_subquery, isouter=True)
)
- & (projects.c.workspace_id.is_(None))
- & (projects_to_products.c.product_name == product_name)
- & (projects.c.hidden.is_(False))
- & (projects.c.type == ProjectType.STANDARD)
- & (
- (projects.c.name.ilike(f"%{text}%"))
- | (projects.c.description.ilike(f"%{text}%"))
- | (projects.c.uuid.ilike(f"%{text}%"))
+ .where(
+ (
+ sa.text(
+ f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
+ )
+ )
+ & (projects_to_products.c.product_name == product_name)
)
)
- )
-
- if tag_ids_list:
- private_workspace_query = private_workspace_query.where(
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).op("@>")(tag_ids_list)
- )
+ if workspace_query.workspace_scope == WorkspaceScope.ALL:
+ shared_workspace_query = shared_workspace_query.where(
+ projects.c.workspace_id.is_not(
+ None
+ ) # <-- All shared workspaces
+ )
+ if filter_by_text is not None:
+ shared_workspace_query = shared_workspace_query.join(
+ users, users.c.id == projects.c.prj_owner, isouter=True
+ )
- shared_workspace_query = (
- sa.select(
- *[
- col
- for col in projects.columns
- if col.name not in ["access_rights"]
- ],
- workspace_access_rights_subquery.c.access_rights,
- projects_to_products.c.product_name,
- projects_to_folders.c.folder_id,
- sa.func.coalesce(
- project_tags_subquery.c.tags,
- sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).label("tags"),
- )
- .select_from(
- projects.join(
- workspace_access_rights_subquery,
+ else:
+ assert (
+ workspace_query.workspace_scope == WorkspaceScope.SHARED
+ ) # nosec
+ shared_workspace_query = shared_workspace_query.where(
projects.c.workspace_id
- == workspace_access_rights_subquery.c.workspace_id,
- )
- .join(projects_to_products)
- .join(
- projects_to_folders,
- (
- (projects_to_folders.c.project_uuid == projects.c.uuid)
- & (projects_to_folders.c.user_id.is_(None))
- ),
- isouter=True,
+ == workspace_query.workspace_id # <-- Specific shared workspace
)
- .join(project_tags_subquery, isouter=True)
+
+ else:
+ shared_workspace_query = None
+
+ ###
+ # Attributes Filters
+ ###
+
+ attributes_filters: list[ColumnElement] = []
+ if filter_by_project_type is not None:
+ attributes_filters.append(
+ projects.c.type == filter_by_project_type.value
)
- .where(
+
+ if filter_hidden is not None:
+ attributes_filters.append(projects.c.hidden.is_(filter_hidden))
+
+ if filter_published is not None:
+ attributes_filters.append(projects.c.published.is_(filter_published))
+
+ if filter_trashed is not None:
+ attributes_filters.append(
+ # marked explicitly as trashed
(
- sa.text(
- f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})"
- )
- )
- & (projects.c.workspace_id.is_not(None))
- & (projects_to_products.c.product_name == product_name)
- & (projects.c.hidden.is_(False))
- & (projects.c.type == ProjectType.STANDARD)
- & (
- (projects.c.name.ilike(f"%{text}%"))
- | (projects.c.description.ilike(f"%{text}%"))
- | (projects.c.uuid.ilike(f"%{text}%"))
+ projects.c.trashed_at.is_not(None)
+ & projects.c.trashed_explicitly.is_(True)
)
+ if filter_trashed
+ # not marked as trashed
+ else projects.c.trashed_at.is_(None)
)
- )
-
- if tag_ids_list:
- shared_workspace_query = shared_workspace_query.where(
+ if filter_by_text is not None:
+ attributes_filters.append(
+ (projects.c.name.ilike(f"%{filter_by_text}%"))
+ | (projects.c.description.ilike(f"%{filter_by_text}%"))
+ | (projects.c.uuid.ilike(f"%{filter_by_text}%"))
+ | (users.c.name.ilike(f"%{filter_by_text}%"))
+ )
+ if filter_tag_ids_list:
+ attributes_filters.append(
sa.func.coalesce(
project_tags_subquery.c.tags,
sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)),
- ).op("@>")(tag_ids_list)
+ ).op("@>")(filter_tag_ids_list)
+ )
+ if folder_query.folder_scope is not FolderScope.ALL:
+ if folder_query.folder_scope == FolderScope.SPECIFIC:
+ attributes_filters.append(
+ projects_to_folders.c.folder_id == folder_query.folder_id
+ )
+ else:
+ assert folder_query.folder_scope == FolderScope.ROOT # nosec
+ attributes_filters.append(projects_to_folders.c.folder_id.is_(None))
+
+ ###
+ # Combined
+ ###
+
+ combined_query: CompoundSelect | Select | None = None
+ if (
+ private_workspace_query is not None
+ and shared_workspace_query is not None
+ ):
+ combined_query = sa.union_all(
+ private_workspace_query.where(sa.and_(*attributes_filters)),
+ shared_workspace_query.where(sa.and_(*attributes_filters)),
+ )
+ elif private_workspace_query is not None:
+ combined_query = private_workspace_query.where(
+ sa.and_(*attributes_filters)
+ )
+ elif shared_workspace_query is not None:
+ combined_query = shared_workspace_query.where(
+ sa.and_(*attributes_filters)
)
- combined_query = sa.union_all(
- private_workspace_query, shared_workspace_query
- )
-
- count_query = sa.select(func.count()).select_from(combined_query)
+ if combined_query is None:
+ msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}"
+ raise ValueError(msg)
+ count_query = sa.select(func.count()).select_from(combined_query.subquery())
total_count = await conn.scalar(count_query)
if order_by.direction == OrderDirection.ASC:
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index 8904cead4bf..3cda6804797 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -364,9 +364,14 @@ async def test_list_projects_with_innaccessible_services(
data, *_ = await _list_and_assert_projects(
client, expected, headers=s4l_product_headers
)
- assert len(data) == 2
+ # UPDATE (use-case 4): 11.11.2024 - This test was checking backwards compatibility for listing
+ # projects that were not in the projects_to_products table. After refactoring the project listing,
+ # we no longer support this. MD double-checked the last_modified_timestamp on projects
+ # that do not have any product assigned: all of them predate 01-11-2022, with the exception of
+ # `4b001ad2-8450-11ec-b105-02420a0b02c7` and `d952cbf4-d838-11ec-af92-02420a0bdad4`, which were added to the osparc product.
+ assert len(data) == 0
data, *_ = await _list_and_assert_projects(client, expected)
- assert len(data) == 2
+ assert len(data) == 0
@pytest.mark.parametrize(
From 2af7f218f196141a18fef4bf6ef75f0248bd7857 Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Mon, 11 Nov 2024 14:28:22 +0100
Subject: [PATCH 03/17] 🎨 [Frontend] UX: New Workspace and New Organization (#6699)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
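
UX rework for creating Workspaces and Organizations:

- The "New Workspace" flow opens a single modal WorkspaceEditor that
  handles creation and sharing in one window; its events are renamed to
  workspaceCreated/workspaceDeleted/workspaceUpdated.
- OrganizationEditor now takes the organization model directly instead
  of a `newOrg` boolean, and after creating an organization the list
  reloads and selects it (reloadOrganizations(orgId)).
- "Billing Settings" is relabelled "Tier Settings".
- New study names are de-duplicated with osparc.utils.Utils.getUniqueName
  over the list of existing study names.
- Fix: WorkspacesAndFoldersTree looked up removed folders with
  getParentFolderId() instead of getFolderId().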
---
.../osparc/dashboard/ResourceBrowserBase.js | 6 +-
.../class/osparc/dashboard/ResourceDetails.js | 2 +-
.../class/osparc/dashboard/StudyBrowser.js | 17 +-
.../osparc/dashboard/StudyBrowserHeader.js | 6 +-
.../osparc/dashboard/WorkspaceButtonItem.js | 2 +-
.../osparc/dashboard/WorkspaceButtonNew.js | 29 +--
.../dashboard/WorkspacesAndFoldersTree.js | 2 +-
.../organizations/OrganizationDetails.js | 12 +-
.../organizations/OrganizationsList.js | 26 ++-
.../class/osparc/editor/OrganizationEditor.js | 34 +++-
.../class/osparc/editor/WorkspaceEditor.js | 170 +++++++++++-------
.../source/class/osparc/store/Workspaces.js | 4 +
.../source/class/osparc/study/StudyOptions.js | 69 ++++---
.../client/source/class/osparc/study/Utils.js | 5 +-
.../client/source/class/osparc/utils/Utils.js | 7 +-
.../resource/osparc/tours/s4l_tours.json | 4 +-
16 files changed, 231 insertions(+), 164 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
index 9334861f11c..31524310535 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
@@ -116,7 +116,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
};
osparc.data.Resources.fetch("studies", "getWallet", params)
.then(wallet => {
- if (isStudyCreation || wallet === null || osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null) {
+ if (
+ isStudyCreation ||
+ wallet === null ||
+ osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null
+ ) {
// pop up study options if the study was just created or if it has no wallet assigned or user has no access to it
const resourceSelector = new osparc.study.StudyOptions(studyId);
const win = osparc.study.StudyOptions.popUpInWindow(resourceSelector);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
index a1ae4d742fa..76e9f628829 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js
@@ -364,7 +364,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", {
const resourceData = this.__resourceData;
if (osparc.utils.Resources.isStudy(resourceData)) {
const id = "Billing";
- const title = this.tr("Billing Settings");
+ const title = this.tr("Tier Settings");
const iconSrc = "@FontAwesome5Solid/cogs/22";
const page = this.__billingSettings = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id);
this.__addOpenButton(page);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
index 7349d7d46b5..288290b06df 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -374,12 +374,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
newWorkspaceCard.setCardKey("new-workspace");
newWorkspaceCard.subscribeToFilterGroup("searchBarFilter");
[
- "createWorkspace",
- "updateWorkspace"
+ "workspaceCreated",
+ "workspaceDeleted",
+ "workspaceUpdated",
].forEach(e => {
- newWorkspaceCard.addListener(e, () => {
- this.__reloadWorkspaces();
- });
+ newWorkspaceCard.addListener(e, () => this.__reloadWorkspaces());
});
this._resourcesContainer.addNewWorkspaceCard(newWorkspaceCard);
},
@@ -1170,7 +1169,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__newStudyBtnClicked: function(button) {
button.setValue(false);
const minStudyData = osparc.data.model.Study.createMinStudyObject();
- const title = osparc.utils.Utils.getUniqueStudyName(minStudyData.name, this._resourcesList);
+ const existingNames = this._resourcesList.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(minStudyData.name, existingNames);
minStudyData["name"] = title;
minStudyData["workspaceId"] = this.getCurrentWorkspaceId();
minStudyData["folderId"] = this.getCurrentFolderId();
@@ -1190,7 +1190,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__newPlanBtnClicked: function(templateData, newStudyName) {
// do not override cached template data
const templateCopyData = osparc.utils.Utils.deepCloneObject(templateData);
- const title = osparc.utils.Utils.getUniqueStudyName(newStudyName, this._resourcesList);
+ const existingNames = this._resourcesList.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(newStudyName, existingNames);
templateCopyData.name = title;
this._showLoadingPage(this.tr("Creating ") + (newStudyName || osparc.product.Utils.getStudyAlias()));
const contextProps = {
@@ -1411,7 +1412,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
__getBillingMenuButton: function(card) {
- const text = osparc.utils.Utils.capitalize(this.tr("Billing Settings..."));
+ const text = osparc.utils.Utils.capitalize(this.tr("Tier Settings..."));
const studyBillingSettingsButton = new qx.ui.menu.Button(text);
studyBillingSettingsButton["billingSettingsButton"] = true;
studyBillingSettingsButton.addListener("tap", () => card.openBilling(), this);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
index 9e2ca51b434..87a6a366b58 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js
@@ -339,10 +339,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", {
__editWorkspace: function() {
const workspace = osparc.store.Workspaces.getInstance().getWorkspace(this.getCurrentWorkspaceId());
- const permissionsView = new osparc.editor.WorkspaceEditor(workspace);
+ const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace);
const title = this.tr("Edit Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(permissionsView, title, 300, 200);
- permissionsView.addListener("workspaceUpdated", () => {
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150);
+ workspaceEditor.addListener("workspaceUpdated", () => {
win.close();
this.__buildLayout();
}, this);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
index 5581ec3212b..4d5253410bf 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js
@@ -185,7 +185,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", {
const workspace = this.getWorkspace();
const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace);
const title = this.tr("Edit Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 200);
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150);
workspaceEditor.addListener("workspaceUpdated", () => {
win.close();
this.fireDataEvent("workspaceUpdated", workspace.getWorkspaceId());
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
index fc1526b387d..ac87579355e 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js
@@ -46,26 +46,29 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", {
},
events: {
- "createWorkspace": "qx.event.type.Data",
- "updateWorkspace": "qx.event.type.Data"
+ "workspaceCreated": "qx.event.type.Event",
+ "workspaceDeleted": "qx.event.type.Event",
+ "workspaceUpdated": "qx.event.type.Event",
},
members: {
__itemSelected: function(newVal) {
if (newVal) {
- const workspaceCreator = new osparc.editor.WorkspaceEditor();
+ const workspaceEditor = new osparc.editor.WorkspaceEditor();
const title = this.tr("New Workspace");
- const win = osparc.ui.window.Window.popUpInWindow(workspaceCreator, title, 300, 200);
- workspaceCreator.addListener("workspaceCreated", e => {
- win.close();
- const newWorkspace = e.getData();
- this.fireDataEvent("createWorkspace", newWorkspace.getWorkspaceId(), this);
- const permissionsView = new osparc.share.CollaboratorsWorkspace(newWorkspace);
- const title2 = qx.locale.Manager.tr("Share Workspace");
- osparc.ui.window.Window.popUpInWindow(permissionsView, title2, 500, 500);
- permissionsView.addListener("updateAccessRights", () => this.fireDataEvent("updateWorkspace", newWorkspace.getWorkspaceId()), this);
+ const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 500, 500).set({
+ modal: true,
+ clickAwayClose: false,
});
- workspaceCreator.addListener("cancel", () => win.close());
+ workspaceEditor.addListener("workspaceCreated", () => this.fireEvent("workspaceCreated"));
+ workspaceEditor.addListener("workspaceDeleted", () => this.fireEvent("workspaceDeleted"));
+ workspaceEditor.addListener("workspaceUpdated", () => {
+ win.close();
+ this.fireEvent("workspaceUpdated");
+ }, this);
+ workspaceEditor.addListener("updateAccessRights", () => this.fireEvent("workspaceUpdated"));
+ win.getChildControl("close-button").addListener("tap", () => workspaceEditor.cancel());
+ workspaceEditor.addListener("cancel", () => win.close());
}
this.setValue(false);
}
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
index c65318bfcd3..7f35c3ff320 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js
@@ -300,7 +300,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", {
if (oldParentFolderId === undefined) {
// it was removed, not moved
// remove it from the cached models
- const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getParentFolderId());
+ const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getFolderId());
if (modelFound) {
const index = this.__models.indexOf(modelFound);
if (index > -1) { // only splice array when item is found
diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
index 6871348d8a0..c9d0501c0cd 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js
@@ -94,17 +94,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationDetails", {
__openEditOrganization: function() {
const org = this.__orgModel;
-
- const newOrg = false;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
- org.bind("gid", orgEditor, "gid");
- org.bind("label", orgEditor, "label");
- org.bind("description", orgEditor, "description");
- org.bind("thumbnail", orgEditor, "thumbnail", {
- converter: val => val ? val : ""
- });
const title = this.tr("Organization Details Editor");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const orgEditor = new osparc.editor.OrganizationEditor(org);
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("updateOrg", () => {
this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor);
});
diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
index 740f54211fa..c2f8656ed83 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js
@@ -99,10 +99,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
allowGrowX: false
});
createOrgBtn.addListener("execute", function() {
- const newOrg = true;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
const title = this.tr("New Organization");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const orgEditor = new osparc.editor.OrganizationEditor();
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("createOrg", () => {
this.__createOrganization(win, orgEditor.getChildControl("create"), orgEditor);
});
@@ -176,7 +175,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
}
},
- reloadOrganizations: function() {
+ reloadOrganizations: function(orgId) {
this.__orgsUIList.resetSelection();
const orgsModel = this.__orgsModel;
orgsModel.removeAll();
@@ -199,6 +198,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
orgsList.sort(this.self().sortOrganizations);
orgsList.forEach(org => orgsModel.append(qx.data.marshal.Json.createModel(org)));
this.setOrganizationsLoaded(true);
+ if (orgId) {
+ this.fireDataEvent("organizationSelected", orgId);
+ }
});
},
@@ -208,16 +210,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
return;
}
- const newOrg = false;
- const orgEditor = new osparc.editor.OrganizationEditor(newOrg);
- org.bind("gid", orgEditor, "gid");
- org.bind("label", orgEditor, "label");
- org.bind("description", orgEditor, "description");
- org.bind("thumbnail", orgEditor, "thumbnail", {
- converter: val => val ? val : ""
- });
const title = this.tr("Organization Details Editor");
- const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250);
+ const orgEditor = new osparc.editor.OrganizationEditor(org);
+ const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200);
orgEditor.addListener("updateOrg", () => {
this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor);
});
@@ -287,14 +282,15 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", {
}
};
osparc.data.Resources.fetch("organizations", "post", params)
- .then(() => {
+ .then(org => {
osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created"));
button.setFetching(false);
osparc.store.Store.getInstance().reset("organizations");
// reload "profile", "organizations" are part of the information in this endpoint
osparc.data.Resources.getOne("profile", {}, null, false)
.then(() => {
- this.reloadOrganizations();
+ // open it
+ this.reloadOrganizations(org["gid"]);
});
})
.catch(err => {
diff --git a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
index f4be5233d2f..b528e760c01 100644
--- a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
+++ b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js
@@ -18,7 +18,7 @@
qx.Class.define("osparc.editor.OrganizationEditor", {
extend: qx.ui.core.Widget,
- construct: function(newOrg = true) {
+ construct: function(organization) {
this.base(arguments);
this._setLayout(new qx.ui.layout.VBox(8));
@@ -29,7 +29,27 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
manager.add(title);
this.getChildControl("description");
this.getChildControl("thumbnail");
- newOrg ? this.getChildControl("create") : this.getChildControl("save");
+ organization ? this.getChildControl("save") : this.getChildControl("create");
+
+ if (organization) {
+ organization.bind("gid", this, "gid");
+ organization.bind("label", this, "label");
+ organization.bind("description", this, "description");
+ organization.bind("thumbnail", this, "thumbnail", {
+ converter: val => val ? val : ""
+ });
+ } else {
+ osparc.store.Store.getInstance().getGroupsOrganizations()
+ .then(orgs => {
+ const existingNames = orgs.map(org => org["label"]);
+          const defaultName = osparc.utils.Utils.getUniqueName("New Organization", existingNames);
+ title.setValue(defaultName);
+ })
+ .catch(err => {
+ console.error(err);
+ title.setValue("New Organization");
+ });
+ }
this.addListener("appear", () => {
title.focus();
@@ -82,7 +102,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
font: "text-14",
backgroundColor: "background-main",
placeholder: this.tr("Title"),
- height: 35
+ height: 30,
});
this.bind("label", control, "value");
control.bind("value", this, "label");
@@ -90,12 +110,10 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
break;
}
case "description": {
- control = new qx.ui.form.TextArea().set({
+ control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Description"),
- autoSize: true,
- minHeight: 70,
- maxHeight: 140
+ height: 30,
});
this.bind("description", control, "value");
control.bind("value", this, "description");
@@ -106,7 +124,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", {
control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Thumbnail"),
- height: 35
+ height: 30,
});
this.bind("thumbnail", control, "value");
control.bind("value", this, "thumbnail");
diff --git a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
index 6b89ee2af78..dab5a9807c3 100644
--- a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
+++ b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js
@@ -33,20 +33,33 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
manager.add(title);
this.getChildControl("description");
this.getChildControl("thumbnail");
- workspace ? this.getChildControl("save") : this.getChildControl("create");
+ this.getChildControl("cancel");
+ this.getChildControl("save");
if (workspace) {
- this.__workspaceId = workspace.getWorkspaceId();
- this.set({
- label: workspace.getName(),
- description: workspace.getDescription(),
- thumbnail: workspace.getThumbnail(),
- });
+ // editing
+ this.setWorkspace(workspace);
+ } else {
+ // creating
+ this.__creatingWorkspace = true;
+ this.__createWorkspace()
+ .then(newWorkspace => {
+ this.setWorkspace(newWorkspace);
+          this.fireEvent("workspaceCreated");
+ this.getChildControl("sharing");
+ });
}
this.addListener("appear", this.__onAppear, this);
},
properties: {
+ workspace: {
+ check: "osparc.data.model.Workspace",
+ init: null,
+ nullable: false,
+ apply: "__applyWorkspace"
+ },
+
label: {
check: "String",
init: "",
@@ -70,13 +83,26 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
},
events: {
- "workspaceCreated": "qx.event.type.Data",
+ "workspaceCreated": "qx.event.type.Event",
+ "workspaceDeleted": "qx.event.type.Event",
"workspaceUpdated": "qx.event.type.Event",
+ "updateAccessRights": "qx.event.type.Event",
"cancel": "qx.event.type.Event"
},
+ statics: {
+ POS: {
+ INTRO: 0,
+ TITLE: 1,
+ DESCRIPTION: 2,
+ THUMBNAIL: 3,
+ SHARING: 4,
+ BUTTONS: 5,
+ }
+ },
+
members: {
- __workspaceId: null,
+ __creatingWorkspace: null,
_createChildControlImpl: function(id) {
let control;
@@ -89,7 +115,7 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
rich: true,
wrap: true
});
- this._add(control);
+ this._addAt(control, this.self().POS.INTRO);
break;
}
case "title": {
@@ -97,71 +123,64 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
font: "text-14",
backgroundColor: "background-main",
placeholder: this.tr("Title"),
- minHeight: 27
+ height: 30,
});
this.bind("label", control, "value");
control.bind("value", this, "label");
- this._add(control);
+ this._addAt(control, this.self().POS.TITLE);
break;
}
case "description": {
- control = new qx.ui.form.TextArea().set({
+ control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Description"),
- autoSize: true,
- minHeight: 70,
+ height: 30,
});
this.bind("description", control, "value");
control.bind("value", this, "description");
- this._add(control);
+ this._addAt(control, this.self().POS.DESCRIPTION);
break;
}
case "thumbnail": {
control = new qx.ui.form.TextField().set({
font: "text-14",
placeholder: this.tr("Thumbnail"),
+ height: 30,
});
this.bind("thumbnail", control, "value");
control.bind("value", this, "thumbnail");
- this._add(control);
+ this._addAt(control, this.self().POS.THUMBNAIL);
break;
}
- case "create": {
- const buttons = this.getChildControl("buttonsLayout");
- control = new osparc.ui.form.FetchButton(this.tr("Create")).set({
- appearance: "form-button"
- });
- control.addListener("execute", () => {
- if (this.__validator.validate()) {
- this.__createWorkspace(control);
- }
- }, this);
- buttons.addAt(control, 1);
+ case "sharing": {
+ control = new osparc.share.CollaboratorsWorkspace(this.getWorkspace());
+        control.addListener("updateAccessRights", () => this.fireEvent("updateAccessRights"), this);
+ this._addAt(control, this.self().POS.SHARING);
+ break;
+ }
+ case "buttons-layout": {
+ control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
+ alignX: "right"
+ }));
+ this._addAt(control, this.self().POS.BUTTONS);
break;
}
case "save": {
- const buttons = this.getChildControl("buttonsLayout");
+ const buttons = this.getChildControl("buttons-layout");
control = new osparc.ui.form.FetchButton(this.tr("Save")).set({
appearance: "form-button"
});
- control.addListener("execute", () => {
- if (this.__validator.validate()) {
- this.__editWorkspace(control);
- }
- }, this);
+ control.addListener("execute", () => this.__saveWorkspace(control), this);
buttons.addAt(control, 1);
break;
}
- case "buttonsLayout": {
- control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
- alignX: "right"
- }));
- const cancelButton = new qx.ui.form.Button(this.tr("Cancel")).set({
+ case "cancel": {
+ const buttons = this.getChildControl("buttons-layout");
+ control = new qx.ui.form.Button(this.tr("Cancel")).set({
appearance: "form-button-text"
});
- cancelButton.addListener("execute", () => this.fireEvent("cancel"), this);
- control.addAt(cancelButton, 0);
- this._add(control);
+ control.addListener("execute", () => this.cancel(), this);
+ buttons.addAt(control, 0);
break;
}
}
@@ -169,36 +188,55 @@ qx.Class.define("osparc.editor.WorkspaceEditor", {
return control || this.base(arguments, id);
},
- __createWorkspace: function(createButton) {
- createButton.setFetching(true);
+ __applyWorkspace: function(workspace) {
+ this.set({
+ label: workspace.getName(),
+ description: workspace.getDescription(),
+ thumbnail: workspace.getThumbnail(),
+ });
+ },
+
+ __createWorkspace: function() {
+ const workspaceStore = osparc.store.Workspaces.getInstance();
+ const workspaces = workspaceStore.getWorkspaces();
+ const existingNames = workspaces.map(workspace => workspace.getName());
+      const defaultName = osparc.utils.Utils.getUniqueName("New Workspace", existingNames);
const newWorkspaceData = {
- name: this.getLabel(),
+ name: this.getLabel() || defaultName,
description: this.getDescription(),
thumbnail: this.getThumbnail(),
};
- osparc.store.Workspaces.getInstance().postWorkspace(newWorkspaceData)
- .then(newWorkspace => this.fireDataEvent("workspaceCreated", newWorkspace))
- .catch(err => {
- console.error(err);
- osparc.FlashMessenger.logAs(err.message, "ERROR");
- })
- .finally(() => createButton.setFetching(false));
+      return workspaceStore.postWorkspace(newWorkspaceData);
},
- __editWorkspace: function(editButton) {
- editButton.setFetching(true);
- const updateData = {
- name: this.getLabel(),
- description: this.getDescription(),
- thumbnail: this.getThumbnail(),
- };
- osparc.store.Workspaces.getInstance().putWorkspace(this.__workspaceId, updateData)
- .then(() => this.fireEvent("workspaceUpdated"))
- .catch(err => {
- console.error(err);
- osparc.FlashMessenger.logAs(err.message, "ERROR");
- })
- .finally(() => editButton.setFetching(false));
+ __saveWorkspace: function(editButton) {
+ if (this.__validator.validate()) {
+ editButton.setFetching(true);
+ const updateData = {
+ name: this.getLabel(),
+ description: this.getDescription(),
+ thumbnail: this.getThumbnail(),
+ };
+ osparc.store.Workspaces.getInstance().putWorkspace(this.getWorkspace().getWorkspaceId(), updateData)
+ .then(() => this.fireEvent("workspaceUpdated"))
+ .catch(err => {
+ console.error(err);
+ osparc.FlashMessenger.logAs(err.message, "ERROR");
+ })
+ .finally(() => editButton.setFetching(false));
+ }
+ },
+
+ cancel: function() {
+ if (this.__creatingWorkspace) {
+ osparc.store.Workspaces.getInstance().deleteWorkspace(this.getWorkspace().getWorkspaceId())
+ .then(() => this.fireEvent("workspaceDeleted"))
+ .catch(err => {
+ console.error(err);
+ osparc.FlashMessenger.logAs(err.message, "ERROR");
+ });
+ }
+ this.fireEvent("cancel");
},
__onAppear: function() {
diff --git a/services/static-webserver/client/source/class/osparc/store/Workspaces.js b/services/static-webserver/client/source/class/osparc/store/Workspaces.js
index 8d803de0af5..253ac714a1d 100644
--- a/services/static-webserver/client/source/class/osparc/store/Workspaces.js
+++ b/services/static-webserver/client/source/class/osparc/store/Workspaces.js
@@ -197,6 +197,10 @@ qx.Class.define("osparc.store.Workspaces", {
return this.workspacesCached.find(w => w.getWorkspaceId() === workspaceId);
},
+ getWorkspaces: function() {
+ return this.workspacesCached;
+ },
+
__addToCache: function(workspace) {
const found = this.workspacesCached.find(w => w.getWorkspaceId() === workspace.getWorkspaceId());
if (!found) {
diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
index 54ba001d6d6..9922ec017e3 100644
--- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
+++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
@@ -23,28 +23,17 @@ qx.Class.define("osparc.study.StudyOptions", {
this._setLayout(new qx.ui.layout.VBox(15));
- this.__studyId = studyId;
-
- const params = {
- url: {
- studyId
- }
- };
- Promise.all([
- osparc.data.Resources.getOne("studies", params),
- osparc.data.Resources.fetch("studies", "getWallet", params)
- ])
- .then(values => {
- const studyData = values[0];
- this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
- if (values[1] && "walletId" in values[1]) {
- this.__projectWalletId = values[1]["walletId"];
- }
- this.__buildLayout();
- });
+ this.setStudyId(studyId);
},
properties: {
+ studyId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ apply: "__fetchStudy"
+ },
+
wallet: {
check: "osparc.data.model.Wallet",
init: null,
@@ -93,9 +82,8 @@ qx.Class.define("osparc.study.StudyOptions", {
},
members: {
- __studyId: null,
__studyData: null,
- __projectWalletId: null,
+ __studyWalletId: null,
_createChildControlImpl: function(id) {
let control;
@@ -105,7 +93,7 @@ qx.Class.define("osparc.study.StudyOptions", {
this._addAt(control, 0);
break;
case "title-field":
- control = new qx.ui.form.TextField(this.__studyData["name"]).set({
+ control = new qx.ui.form.TextField().set({
maxWidth: 220
});
this.getChildControl("title-layout").add(control);
@@ -192,6 +180,27 @@ qx.Class.define("osparc.study.StudyOptions", {
return control || this.base(arguments, id);
},
+ __fetchStudy: function(studyId) {
+ const params = {
+ url: {
+ studyId
+ }
+ };
+ Promise.all([
+ osparc.data.Resources.getOne("studies", params),
+ osparc.data.Resources.fetch("studies", "getWallet", params)
+ ])
+ .then(values => {
+ const studyData = values[0];
+ this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
+
+ if (values[1] && "walletId" in values[1]) {
+ this.__studyWalletId = values[1]["walletId"];
+ }
+ this.__buildLayout();
+ });
+ },
+
__applyWallet: function(wallet) {
if (wallet) {
const walletSelector = this.getChildControl("wallet-selector");
@@ -214,15 +223,16 @@ qx.Class.define("osparc.study.StudyOptions", {
__buildTopSummaryLayout: function() {
const store = osparc.store.Store.getInstance();
- this._createChildControlImpl("title-label");
const titleField = this.getChildControl("title-field");
+ if (this.__studyData) {
+ titleField.setValue(this.__studyData["name"]);
+ }
titleField.addListener("appear", () => {
titleField.focus();
titleField.activate();
});
// Wallet Selector
- this._createChildControlImpl("wallet-selector-label");
const walletSelector = this.getChildControl("wallet-selector");
const wallets = store.getWallets();
@@ -241,8 +251,8 @@ qx.Class.define("osparc.study.StudyOptions", {
}
});
const preferredWallet = store.getPreferredWallet();
- if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__projectWalletId))) {
- selectWallet(this.__projectWalletId);
+ if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__studyWalletId))) {
+ selectWallet(this.__studyWalletId);
} else if (preferredWallet) {
selectWallet(preferredWallet.getWalletId());
} else if (!osparc.desktop.credits.Utils.autoSelectActiveWallet(walletSelector)) {
@@ -283,17 +293,18 @@ qx.Class.define("osparc.study.StudyOptions", {
// first, update the name if necessary
const titleSelection = this.getChildControl("title-field").getValue();
- if (this.__studyData["name"] !== titleSelection) {
+ if (this.__studyData && this.__studyData["name"] !== titleSelection) {
await this.__updateName(this.__studyData, titleSelection);
}
// second, update the wallet if necessary
const store = osparc.store.Store.getInstance();
const walletSelection = this.getChildControl("wallet-selector").getSelection();
- if (walletSelection.length && walletSelection[0]["walletId"]) {
+ const studyId = this.getStudyId();
+ if (studyId && walletSelection.length && walletSelection[0]["walletId"]) {
const params = {
url: {
- "studyId": this.__studyData["uuid"],
+ studyId,
"walletId": walletSelection[0]["walletId"]
}
};
diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js
index dab2bd53bd8..0240d263e47 100644
--- a/services/static-webserver/client/source/class/osparc/study/Utils.js
+++ b/services/static-webserver/client/source/class/osparc/study/Utils.js
@@ -116,7 +116,8 @@ qx.Class.define("osparc.study.Utils", {
newStudyLabel = metadata["name"];
}
if (existingStudies) {
- const title = osparc.utils.Utils.getUniqueStudyName(newStudyLabel, existingStudies);
+ const existingNames = existingStudies.map(study => study["name"]);
+ const title = osparc.utils.Utils.getUniqueName(newStudyLabel, existingNames);
minStudyData["name"] = title;
} else {
minStudyData["name"] = newStudyLabel;
@@ -234,7 +235,7 @@ qx.Class.define("osparc.study.Utils", {
// update task
osparc.widget.ProgressSequence.updateTaskProgress(existingTask, {
value: percent,
- progressLabel: percent*100 + "%"
+ progressLabel: parseFloat((percent*100).toFixed(2)) + "%"
});
} else {
// new task
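The progressLabel change above caps the displayed percentage at two decimals and lets parseFloat drop trailing zeros, so one third renders as "33.33%" and one half as "50%" rather than "50.00%". A rough Python equivalent of that formatting, for illustration only (progress_label is a hypothetical helper, not part of the patch):

    def progress_label(percent: float) -> str:
        # format to 2 decimals, then let float() strip trailing zeros like parseFloat
        return f"{float(f'{percent * 100:.2f}'):g}%"

    assert progress_label(1 / 3) == "33.33%"
    assert progress_label(0.5) == "50%"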
diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js
index 5c751c2ee8f..b095d95eee2 100644
--- a/services/static-webserver/client/source/class/osparc/utils/Utils.js
+++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js
@@ -277,12 +277,11 @@ qx.Class.define("osparc.utils.Utils", {
return reloadButton;
},
- getUniqueStudyName: function(preferredName, list) {
+ getUniqueName: function(preferredName, existingNames) {
let title = preferredName;
- const existingTitles = list.map(study => study.name);
- if (existingTitles.includes(title)) {
+ if (existingNames.includes(title)) {
let cont = 1;
- while (existingTitles.includes(`${title} (${cont})`)) {
+ while (existingNames.includes(`${title} (${cont})`)) {
cont++;
}
title += ` (${cont})`;
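getUniqueStudyName is generalized here into getUniqueName, which takes the list of existing names directly instead of study objects: it appends " (n)" with the smallest counter not yet taken. A minimal Python sketch of the same algorithm, mirroring the JS above (get_unique_name is an illustrative stand-in):

    def get_unique_name(preferred_name: str, existing_names: list[str]) -> str:
        # append " (n)" with the smallest n not already in use
        title = preferred_name
        if title in existing_names:
            count = 1
            while f"{title} ({count})" in existing_names:
                count += 1
            title = f"{title} ({count})"
        return title

    assert get_unique_name("New Workspace", ["New Workspace", "New Workspace (1)"]) == "New Workspace (2)"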
diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
index cacb9ffb83d..492544fa598 100644
--- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
+++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json
@@ -7,7 +7,7 @@
"steps": [{
"anchorEl": "osparc-test-id=dashboardTabs",
"title": "Dashboard Menu",
- "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials, Services and Data.",
+ "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials and Services.",
"placement": "bottom"
}, {
"beforeClick": {
@@ -28,7 +28,7 @@
"selector": "osparc-test-id=servicesTabBtn"
},
"anchorEl": "osparc-test-id=servicesTabBtn",
-      "text": "Every Project in Sim4Life is composed of at lease one so-called Service.&lt;br&gt;Services are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
+      "text": "Every Project in Sim4Life is composed of at least one so-called Service.&lt;br&gt;Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
"placement": "bottom"
}]
},
From 8f182d3c07fd261f56f0e915204f4acf50da8504 Mon Sep 17 00:00:00 2001
From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 11 Nov 2024 18:59:50 +0100
Subject: [PATCH 04/17] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Migrates=20folders?=
=?UTF-8?q?=20and=20workspaces=20repositories=20to=20asyncpg=20(#6688)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../folders/_folders_api.py | 2 +-
.../folders/_folders_db.py | 130 +++++++-----------
.../folders/_trash_api.py | 70 +++++-----
.../workspaces/_groups_db.py | 40 ++++--
.../workspaces/_workspaces_db.py | 54 +++++---
5 files changed, 145 insertions(+), 151 deletions(-)
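The migration applies one pattern across all five files: every repository function gains an optional positional connection argument, read paths wrap it in pass_or_acquire_connection (reuse the caller's connection or acquire one, no transaction) and write paths in transaction_context (join the caller's transaction or open one), so several calls can be composed into a single unit of work. A minimal sketch of that shape, assuming a hypothetical things table (get_asyncpg_engine and the utils_repos helpers are the real ones used in the diff below):

    from aiohttp import web
    from simcore_postgres_database.utils_repos import (
        pass_or_acquire_connection,
        transaction_context,
    )
    from sqlalchemy.ext.asyncio import AsyncConnection
    from sqlalchemy.sql import select

    from ..db.plugin import get_asyncpg_engine

    async def get_thing(
        app: web.Application,
        connection: AsyncConnection | None = None,
        *,
        thing_id: int,
    ):
        # read: reuse the caller's connection if given, otherwise acquire one
        async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
            result = await conn.stream(select(things).where(things.c.id == thing_id))  # `things` is a hypothetical sa.Table
            return await result.first()

    async def delete_thing(
        app: web.Application,
        connection: AsyncConnection | None = None,
        *,
        thing_id: int,
    ) -> None:
        # write: join the caller's transaction if given, otherwise open (and commit) one
        async with transaction_context(get_asyncpg_engine(app), connection) as conn:
            await conn.execute(things.delete().where(things.c.id == thing_id))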
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
index 0344124abb6..043527d2def 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
@@ -262,7 +262,7 @@ async def update_folder(
folder_db = await folders_db.update(
app,
- folder_id=folder_id,
+ folders_id_or_ids=folder_id,
name=name,
parent_folder_id=parent_folder_id,
product_name=product_name,
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index 0ee44c17199..561bcb64c9e 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -19,11 +19,16 @@
from simcore_postgres_database.models.folders_v2 import folders_v2
from simcore_postgres_database.models.projects import projects
from simcore_postgres_database.models.projects_to_folders import projects_to_folders
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
from sqlalchemy import func
+from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.orm import aliased
from sqlalchemy.sql import asc, desc, select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import FolderAccessForbiddenError, FolderNotFoundError
_logger = logging.getLogger(__name__)
@@ -55,6 +60,7 @@ def as_dict_exclude_unset(**params) -> dict[str, Any]:
async def create(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
created_by_gid: GroupID,
folder_name: str,
@@ -67,8 +73,8 @@ async def create(
user_id is not None and workspace_id is not None
), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one."
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
folders_v2.insert()
.values(
name=folder_name,
@@ -88,6 +94,7 @@ async def create(
async def list_(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
content_of_folder_id: FolderID | None,
user_id: UserID | None,
@@ -142,18 +149,17 @@ async def list_(
list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field)))
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
- count_result = await conn.execute(count_query)
- total_count = await count_result.scalar()
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ total_count = await conn.scalar(count_query)
- result = await conn.execute(list_query)
- rows = await result.fetchall() or []
- results: list[FolderDB] = [FolderDB.from_orm(row) for row in rows]
- return cast(int, total_count), results
+ result = await conn.stream(list_query)
+ folders: list[FolderDB] = [FolderDB.from_orm(row) async for row in result]
+ return cast(int, total_count), folders
async def get(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
@@ -167,8 +173,8 @@ async def get(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
raise FolderAccessForbiddenError(
@@ -179,6 +185,7 @@ async def get(
async def get_for_user_or_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
@@ -203,8 +210,8 @@ async def get_for_user_or_workspace(
else:
query = query.where(folders_v2.c.workspace_id == workspace_id)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
raise FolderAccessForbiddenError(
@@ -213,8 +220,10 @@ async def get_for_user_or_workspace(
return FolderDB.from_orm(row)
-async def _update_impl(
+async def update(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
folders_id_or_ids: FolderID | set[FolderID],
product_name: ProductName,
# updatable columns
@@ -247,64 +256,22 @@ async def _update_impl(
# single-update
query = query.where(folders_v2.c.folder_id == folders_id_or_ids)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(query)
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(query)
row = await result.first()
if row is None:
raise FolderNotFoundError(reason=f"Folder {folders_id_or_ids} not found.")
return FolderDB.from_orm(row)
-async def update_batch(
- app: web.Application,
- *folder_id: FolderID,
- product_name: ProductName,
- # updatable columns
- name: str | UnSet = _unset,
- parent_folder_id: FolderID | None | UnSet = _unset,
- trashed_at: datetime | None | UnSet = _unset,
- trashed_explicitly: bool | UnSet = _unset,
-) -> FolderDB:
- return await _update_impl(
- app=app,
- folders_id_or_ids=set(folder_id),
- product_name=product_name,
- name=name,
- parent_folder_id=parent_folder_id,
- trashed_at=trashed_at,
- trashed_explicitly=trashed_explicitly,
- )
-
-
-async def update(
- app: web.Application,
- *,
- folder_id: FolderID,
- product_name: ProductName,
- # updatable columns
- name: str | UnSet = _unset,
- parent_folder_id: FolderID | None | UnSet = _unset,
- trashed_at: datetime | None | UnSet = _unset,
- trashed_explicitly: bool | UnSet = _unset,
-) -> FolderDB:
- return await _update_impl(
- app=app,
- folders_id_or_ids=folder_id,
- product_name=product_name,
- name=name,
- parent_folder_id=parent_folder_id,
- trashed_at=trashed_at,
- trashed_explicitly=trashed_explicitly,
- )
-
-
async def delete_recursively(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -330,10 +297,9 @@ async def delete_recursively(
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
+ result = await conn.stream(final_query)
+ # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
+ rows = [row async for row in result]
# Sort folders so that child folders come first
sorted_folders = sorted(
@@ -347,6 +313,7 @@ async def delete_recursively(
async def get_projects_recursively_only_if_user_is_owner(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
private_workspace_user_id_or_none: UserID | None,
@@ -361,7 +328,8 @@ async def get_projects_recursively_only_if_user_is_owner(
or the `users_to_groups` table for private workspace projects.
"""
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -370,6 +338,7 @@ async def get_projects_recursively_only_if_user_is_owner(
& (folders_v2.c.product_name == product_name)
)
folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True)
+
# Step 2: Define the recursive case
folder_alias = aliased(folders_v2)
recursive_query = select(
@@ -380,16 +349,15 @@ async def get_projects_recursively_only_if_user_is_owner(
folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id,
)
)
+
# Step 3: Combine base and recursive cases into a CTE
folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query)
+
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
-
- folder_ids = [item[0] for item in rows]
+ result = await conn.stream(final_query)
+ # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
+ folder_ids = [item[0] async for item in result]
query = (
select(projects_to_folders.c.project_uuid)
@@ -402,19 +370,19 @@ async def get_projects_recursively_only_if_user_is_owner(
if private_workspace_user_id_or_none is not None:
query = query.where(projects.c.prj_owner == user_id)
- result = await conn.execute(query)
-
- rows = await result.fetchall() or []
- return [ProjectID(row[0]) for row in rows]
+ result = await conn.stream(query)
+ return [ProjectID(row[0]) async for row in result]
async def get_folders_recursively(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
folder_id: FolderID,
product_name: ProductName,
) -> list[FolderID]:
- async with get_database_engine(app).acquire() as conn, conn.begin():
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+
# Step 1: Define the base case for the recursive CTE
base_query = select(
folders_v2.c.folder_id, folders_v2.c.parent_folder_id
@@ -440,9 +408,5 @@ async def get_folders_recursively(
# Step 4: Execute the query to get all descendants
final_query = select(folder_hierarchy_cte)
- result = await conn.execute(final_query)
- rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)]
- await result.fetchall() or []
- )
-
- return [FolderID(row[0]) for row in rows]
+ result = await conn.stream(final_query)
+ return [FolderID(row[0]) async for row in result]
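The Step 1-4 comments in delete_recursively and get_folders_recursively describe the standard SQLAlchemy recipe for walking the folder tree. Condensed into one fragment (same names as in the diff; assumed to run inside one of the connection blocks above):

    # Step 1: base case - start from the given folder
    base_query = select(folders_v2.c.folder_id, folders_v2.c.parent_folder_id).where(
        folders_v2.c.folder_id == folder_id
    )
    folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True)

    # Step 2: recursive case - children of anything already collected
    folder_alias = aliased(folders_v2)
    recursive_query = select(
        folder_alias.c.folder_id, folder_alias.c.parent_folder_id
    ).join(
        folder_hierarchy_cte,
        folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id,
    )

    # Step 3: union base and recursive cases; Step 4: stream all descendants
    folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query)
    result = await conn.stream(select(folder_hierarchy_cte))
    folder_ids = [FolderID(row[0]) async for row in result]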
diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
index 1cad0415161..b3e1823369a 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
@@ -7,7 +7,10 @@
from models_library.products import ProductName
from models_library.projects import ProjectID
from models_library.users import UserID
+from simcore_postgres_database.utils_repos import transaction_context
+from sqlalchemy.ext.asyncio import AsyncConnection
+from ..db.plugin import get_asyncpg_engine
from ..projects._trash_api import trash_project, untrash_project
from ..workspaces.api import check_user_workspace_access
from . import _folders_db
@@ -55,6 +58,7 @@ async def _check_exists_and_access(
async def _folders_db_update(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
product_name: ProductName,
folder_id: FolderID,
@@ -63,7 +67,8 @@ async def _folders_db_update(
# EXPLICIT un/trash
await _folders_db.update(
app,
- folder_id=folder_id,
+ connection,
+ folders_id_or_ids=folder_id,
product_name=product_name,
trashed_at=trashed_at,
trashed_explicitly=trashed_at is not None,
@@ -73,15 +78,16 @@ async def _folders_db_update(
child_folders: set[FolderID] = {
f
for f in await _folders_db.get_folders_recursively(
- app, folder_id=folder_id, product_name=product_name
+ app, connection, folder_id=folder_id, product_name=product_name
)
if f != folder_id
}
if child_folders:
- await _folders_db.update_batch(
+ await _folders_db.update(
app,
- *child_folders,
+ connection,
+ folders_id_or_ids=child_folders,
product_name=product_name,
trashed_at=trashed_at,
trashed_explicitly=False,
@@ -104,40 +110,40 @@ async def trash_folder(
# Trash
trashed_at = arrow.utcnow().datetime
- _logger.debug(
- "TODO: Unit of work for all folders and projects and fails if force_stop_first=%s is False",
- force_stop_first,
- )
-
- # 1. Trash folder and children
- await _folders_db_update(
- app,
- folder_id=folder_id,
- product_name=product_name,
- trashed_at=trashed_at,
- )
-
- # 2. Trash all child projects that I am an owner
- child_projects: list[
- ProjectID
- ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
- app,
- folder_id=folder_id,
- private_workspace_user_id_or_none=user_id if workspace_is_private else None,
- user_id=user_id,
- product_name=product_name,
- )
+ async with transaction_context(get_asyncpg_engine(app)) as connection:
- for project_id in child_projects:
- await trash_project(
+ # 1. Trash folder and children
+ await _folders_db_update(
app,
+ connection,
+ folder_id=folder_id,
product_name=product_name,
+ trashed_at=trashed_at,
+ )
+
+ # 2. Trash all child projects that I am an owner
+ child_projects: list[
+ ProjectID
+ ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
+ app,
+ connection,
+ folder_id=folder_id,
+ private_workspace_user_id_or_none=user_id if workspace_is_private else None,
user_id=user_id,
- project_id=project_id,
- force_stop_first=force_stop_first,
- explicit=False,
+ product_name=product_name,
)
+ for project_id in child_projects:
+ await trash_project(
+ app,
+ # NOTE: this needs to be included in the unit-of-work, i.e. connection,
+ product_name=product_name,
+ user_id=user_id,
+ project_id=project_id,
+ force_stop_first=force_stop_first,
+ explicit=False,
+ )
+
async def untrash_folder(
app: web.Application,
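trash_folder now wraps the whole cascade in one transaction_context and threads its connection through every repository call, so the folder updates and the owned-project lookup commit or roll back together; per the NOTE above, trash_project is the remaining call still outside that unit of work. Schematically (a condensed view of the hunk above, not new behavior):

    async with transaction_context(get_asyncpg_engine(app)) as connection:
        # any exception inside the block rolls back every call that received `connection`
        await _folders_db_update(
            app, connection,
            folder_id=folder_id, product_name=product_name, trashed_at=trashed_at,
        )
        child_projects = await _folders_db.get_projects_recursively_only_if_user_is_owner(
            app, connection,
            folder_id=folder_id,
            private_workspace_user_id_or_none=user_id if workspace_is_private else None,
            user_id=user_id, product_name=product_name,
        )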
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
index daeba51ae80..019ec5530b0 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py
@@ -13,10 +13,15 @@
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
)
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
from sqlalchemy import func, literal_column
+from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.sql import select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import WorkspaceGroupNotFoundError
_logger = logging.getLogger(__name__)
@@ -41,15 +46,16 @@ class Config:
async def create_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
- *,
read: bool,
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces_access_rights.insert()
.values(
workspace_id=workspace_id,
@@ -68,6 +74,8 @@ async def create_workspace_group(
async def list_workspace_groups(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
) -> list[WorkspaceGroupGetDB]:
stmt = (
@@ -83,14 +91,15 @@ async def list_workspace_groups(
.where(workspaces_access_rights.c.workspace_id == workspace_id)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(stmt)
- rows = await result.fetchall() or []
- return [WorkspaceGroupGetDB.from_orm(row) for row in rows]
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(stmt)
+ return [WorkspaceGroupGetDB.from_orm(row) async for row in result]
async def get_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
) -> WorkspaceGroupGetDB:
@@ -110,8 +119,8 @@ async def get_workspace_group(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(stmt)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(stmt)
row = await result.first()
if row is None:
raise WorkspaceGroupNotFoundError(
@@ -122,15 +131,16 @@ async def get_workspace_group(
async def update_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
- *,
read: bool,
write: bool,
delete: bool,
) -> WorkspaceGroupGetDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces_access_rights.update()
.values(
read=read,
@@ -153,10 +163,12 @@ async def update_workspace_group(
async def delete_workspace_group(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
group_id: GroupID,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
await conn.execute(
workspaces_access_rights.delete().where(
(workspaces_access_rights.c.workspace_id == workspace_id)
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
index 23de15c3b19..a959843a969 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
@@ -22,11 +22,16 @@
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
)
+from simcore_postgres_database.utils_repos import (
+ pass_or_acquire_connection,
+ transaction_context,
+)
from sqlalchemy import asc, desc, func
from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
+from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.sql import Subquery, select
-from ..db.plugin import get_database_engine
+from ..db.plugin import get_asyncpg_engine
from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError
_logger = logging.getLogger(__name__)
@@ -45,14 +50,16 @@
async def create_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
product_name: ProductName,
owner_primary_gid: GroupID,
name: str,
description: str | None,
thumbnail: str | None,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces.insert()
.values(
name=name,
@@ -69,7 +76,7 @@ async def create_workspace(
return WorkspaceDB.from_orm(row)
-access_rights_subquery = (
+_access_rights_subquery = (
select(
workspaces_access_rights.c.workspace_id,
func.jsonb_object_agg(
@@ -116,6 +123,7 @@ def _create_my_access_rights_subquery(user_id: UserID) -> Subquery:
async def list_workspaces_for_user(
app: web.Application,
+ connection: AsyncConnection | None = None,
*,
user_id: UserID,
product_name: ProductName,
@@ -128,11 +136,11 @@ async def list_workspaces_for_user(
base_query = (
select(
*_SELECTION_ARGS,
- access_rights_subquery.c.access_rights,
+ _access_rights_subquery.c.access_rights,
my_access_rights_subquery.c.my_access_rights,
)
.select_from(
- workspaces.join(access_rights_subquery).join(my_access_rights_subquery)
+ workspaces.join(_access_rights_subquery).join(my_access_rights_subquery)
)
.where(workspaces.c.product_name == product_name)
)
@@ -148,21 +156,21 @@ async def list_workspaces_for_user(
list_query = base_query.order_by(desc(getattr(workspaces.c, order_by.field)))
list_query = list_query.offset(offset).limit(limit)
- async with get_database_engine(app).acquire() as conn:
- count_result = await conn.execute(count_query)
- total_count = await count_result.scalar()
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ total_count = await conn.scalar(count_query)
- result = await conn.execute(list_query)
- rows = await result.fetchall() or []
- results: list[UserWorkspaceAccessRightsDB] = [
- UserWorkspaceAccessRightsDB.from_orm(row) for row in rows
+ result = await conn.stream(list_query)
+ items: list[UserWorkspaceAccessRightsDB] = [
+ UserWorkspaceAccessRightsDB.from_orm(row) async for row in result
]
- return cast(int, total_count), results
+ return cast(int, total_count), items
async def get_workspace_for_user(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
user_id: UserID,
workspace_id: WorkspaceID,
product_name: ProductName,
@@ -172,11 +180,11 @@ async def get_workspace_for_user(
base_query = (
select(
*_SELECTION_ARGS,
- access_rights_subquery.c.access_rights,
+ _access_rights_subquery.c.access_rights,
my_access_rights_subquery.c.my_access_rights,
)
.select_from(
- workspaces.join(access_rights_subquery).join(my_access_rights_subquery)
+ workspaces.join(_access_rights_subquery).join(my_access_rights_subquery)
)
.where(
(workspaces.c.workspace_id == workspace_id)
@@ -184,8 +192,8 @@ async def get_workspace_for_user(
)
)
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(base_query)
+ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(base_query)
row = await result.first()
if row is None:
raise WorkspaceAccessForbiddenError(
@@ -196,14 +204,16 @@ async def get_workspace_for_user(
async def update_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
name: str,
description: str | None,
thumbnail: str | None,
product_name: ProductName,
) -> WorkspaceDB:
- async with get_database_engine(app).acquire() as conn:
- result = await conn.execute(
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
+ result = await conn.stream(
workspaces.update()
.values(
name=name,
@@ -225,10 +235,12 @@ async def update_workspace(
async def delete_workspace(
app: web.Application,
+ connection: AsyncConnection | None = None,
+ *,
workspace_id: WorkspaceID,
product_name: ProductName,
) -> None:
- async with get_database_engine(app).acquire() as conn:
+ async with transaction_context(get_asyncpg_engine(app), connection) as conn:
await conn.execute(
workspaces.delete().where(
(workspaces.c.workspace_id == workspace_id)
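The same mechanical rewrite recurs in every query of these repositories: aiopg's buffered await conn.execute(query) followed by await result.fetchall() becomes await conn.stream(query) consumed with async for, and count queries collapse to a single await conn.scalar(count_query). Side by side, using the list query from this file:

    # before (aiopg): buffer all rows, then convert
    result = await conn.execute(list_query)
    rows = await result.fetchall() or []
    items = [UserWorkspaceAccessRightsDB.from_orm(row) for row in rows]

    # after (asyncpg via SQLAlchemy): iterate the streamed result directly
    total_count = await conn.scalar(count_query)
    result = await conn.stream(list_query)
    items = [UserWorkspaceAccessRightsDB.from_orm(row) async for row in result]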
From ec1e84e842cc1f70b3e8751325cca808c305b18c Mon Sep 17 00:00:00 2001
From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com>
Date: Tue, 12 Nov 2024 10:58:07 +0100
Subject: [PATCH 05/17] =?UTF-8?q?=F0=9F=94=A8=20Fixes=20e2e:=20tests/perfo?=
 =?UTF-8?q?rmance=20=20(#6707)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
tests/performance/Makefile | 12 ++++-
.../locust_files/platform_ping_test.py | 4 +-
tests/performance/locust_settings.py | 50 ++++++++++++++++++-
3 files changed, 60 insertions(+), 6 deletions(-)
diff --git a/tests/performance/Makefile b/tests/performance/Makefile
index ead1e417d39..d41a60d7af8 100644
--- a/tests/performance/Makefile
+++ b/tests/performance/Makefile
@@ -12,7 +12,8 @@ export ENV_FILE
NETWORK_NAME=dashboards_timenet
# UTILS
-get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut --delimiter=" " --fields=1)
+# NOTE: keep short arguments for `cut` so it works in both BusyBox (alpine) AND Ubuntu
+get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut -d " " -f 1)
# Check that given variables are set and all have non-empty values,
# die with an error otherwise.
@@ -28,6 +29,7 @@ __check_defined = \
$(error Undefined $1$(if $2, ($2))))
+
.PHONY: build
build: ## builds distributed osparc locust docker image
docker \
@@ -42,6 +44,8 @@ build: ## builds distributed osparc locust docker image
push:
docker push itisfoundation/locust:$(LOCUST_VERSION)
+
+
.PHONY: down
down: ## stops and removes osparc locust containers
docker compose --file docker-compose.yml down
@@ -55,6 +59,8 @@ test: ## runs osparc locust. Locust and test configuration are specified in ENV_
fi
docker compose --file docker-compose.yml up --scale worker=4 --exit-code-from=master
+
+
.PHONY: dashboards-up dashboards-down
dashboards-up: ## Create Grafana dashboard for inspecting locust results. See dashboard on localhost:3000
@@ -68,6 +74,8 @@ dashboards-up: ## Create Grafana dashboard for inspecting locust results. See da
dashboards-down:
@locust-compose down
+
+
.PHONY: install-ci install-dev
install-dev:
@@ -80,4 +88,4 @@ install-ci:
.PHONY: config
config:
@$(call check_defined, input, please define inputs when calling $@ - e.g. ```make $@ input="--help"```)
- @uv run locust_settings.py $(input) | tee .env
+ @uv run locust_settings.py $(input) | tee "${ENV_FILE}"
diff --git a/tests/performance/locust_files/platform_ping_test.py b/tests/performance/locust_files/platform_ping_test.py
index 61cb0733458..c8839bb8c2b 100644
--- a/tests/performance/locust_files/platform_ping_test.py
+++ b/tests/performance/locust_files/platform_ping_test.py
@@ -19,7 +19,7 @@
assert locust_plugins # nosec
-class LocustAuth(BaseSettings):
+class MonitoringBasicAuth(BaseSettings):
SC_USER_NAME: str = Field(default=..., examples=[""])
SC_PASSWORD: str = Field(default=..., examples=[""])
@@ -27,7 +27,7 @@ class LocustAuth(BaseSettings):
class WebApiUser(FastHttpUser):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- _auth = LocustAuth()
+ _auth = MonitoringBasicAuth()
self.auth = (
_auth.SC_USER_NAME,
_auth.SC_PASSWORD,
diff --git a/tests/performance/locust_settings.py b/tests/performance/locust_settings.py
index 24f896180fd..48c219871fe 100644
--- a/tests/performance/locust_settings.py
+++ b/tests/performance/locust_settings.py
@@ -1,10 +1,21 @@
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+# "parse",
+# "pydantic",
+# "pydantic-settings",
+# ]
+# ///
# pylint: disable=unused-argument
# pylint: disable=no-self-use
# pylint: disable=no-name-in-module
+import importlib.util
+import inspect
import json
from datetime import timedelta
from pathlib import Path
+from types import ModuleType
from typing import Final
from parse import Result, parse
@@ -26,6 +37,37 @@
assert _LOCUST_FILES_DIR.is_dir()
+def _check_load_and_instantiate_settings_classes(file_path: str):
+ module_name = Path(file_path).stem
+ spec = importlib.util.spec_from_file_location(module_name, file_path)
+ if spec is None or spec.loader is None:
+ msg = f"Invalid {file_path=}"
+ raise ValueError(msg)
+
+ module: ModuleType = importlib.util.module_from_spec(spec)
+
+ # Execute the module in its own namespace
+ try:
+ spec.loader.exec_module(module)
+ except Exception as e:
+ msg = f"Failed to load module {module_name} from {file_path}"
+ raise ValueError(msg) from e
+
+ # Filter subclasses of BaseSettings
+ settings_classes = [
+ obj
+ for _, obj in inspect.getmembers(module, inspect.isclass)
+ if issubclass(obj, BaseSettings) and obj is not BaseSettings
+ ]
+
+ for settings_class in settings_classes:
+ try:
+ settings_class()
+ except Exception as e:
+ msg = f"Missing env vars for {settings_class.__name__} in {file_path=}: {e}"
+ raise ValueError(msg) from e
+
+
class LocustSettings(BaseSettings):
model_config = SettingsConfigDict(cli_parse_args=True)
@@ -44,8 +86,8 @@ class LocustSettings(BaseSettings):
LOCUST_RUN_TIME: timedelta
LOCUST_SPAWN_RATE: PositiveInt = Field(default=20)
- # Options for Timescale + Grafana Dashboards
- # SEE https://github.com/SvenskaSpel/locust-plugins/blob/master/locust_plugins/timescale/
+ # Timescale: Log and graph results using TimescaleDB and Grafana dashboards
+ # SEE https://github.com/SvenskaSpel/locust-plugins/tree/master/locust_plugins/dashboards
#
LOCUST_TIMESCALE: NonNegativeInt = Field(
default=1,
@@ -87,6 +129,10 @@ def _validate_locust_file(cls, v: Path) -> Path:
if not v.is_relative_to(_LOCUST_FILES_DIR):
msg = f"{v} must be a test file relative to {_LOCUST_FILES_DIR}"
raise ValueError(msg)
+
+ # NOTE: CHECK that all the env-vars are defined for this test
+ # _check_load_and_instantiate_settings_classes(f"{v}")
+
return v.relative_to(_TEST_DIR)
@field_serializer("LOCUST_RUN_TIME")
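The commented-out call in _validate_locust_file shows the intent of _check_load_and_instantiate_settings_classes: import the selected locustfile and instantiate every BaseSettings subclass it defines, so that missing env vars (for instance SC_USER_NAME/SC_PASSWORD of MonitoringBasicAuth above) fail at config time instead of mid-run. If enabled, it would be called roughly as follows (hypothetical call site, path relative to tests/performance):

    _check_load_and_instantiate_settings_classes("locust_files/platform_ping_test.py")
    # raises ValueError("Missing env vars for MonitoringBasicAuth in ...") when unset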
From f8f67c9609bfa7a701d2f60f421d81a9d6ea1688 Mon Sep 17 00:00:00 2001
From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com>
Date: Tue, 12 Nov 2024 13:49:27 +0100
Subject: [PATCH 06/17] =?UTF-8?q?=E2=9C=85=20Extends=20test=5FEC2=5FINSTAN?=
=?UTF-8?q?CES=5FALLOWED=5FTYPES=5Fempty=5Fnot=5Fallowed=20(#6705)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../tests/unit/test_core_settings.py | 34 ++++++++++++++++++-
1 file changed, 33 insertions(+), 1 deletion(-)
diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py
index 9315c8fcfd1..e975d944f0b 100644
--- a/services/autoscaling/tests/unit/test_core_settings.py
+++ b/services/autoscaling/tests/unit/test_core_settings.py
@@ -4,6 +4,7 @@
import datetime
import json
+import os
import pytest
from faker import Faker
@@ -197,11 +198,42 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_passing_valid_image_tags( # noqa: N802
def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed( # noqa: N802
app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
):
+ assert app_environment["AUTOSCALING_EC2_INSTANCES"] == "{}"
monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
- with pytest.raises(ValidationError):
+ # test child settings
+ with pytest.raises(ValidationError) as err_info:
+ EC2InstancesSettings.create_from_envs()
+
+ assert err_info.value.errors()[0]["loc"] == ("EC2_INSTANCES_ALLOWED_TYPES",)
+
+
+def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_with_main_field_env_var( # noqa: N802
+ app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+):
+ assert os.environ["AUTOSCALING_EC2_INSTANCES"] == "{}"
+ monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
+
+ # now as part of AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None
+ with pytest.raises(ValidationError) as exc_before:
+ ApplicationSettings.create_from_envs(AUTOSCALING_EC2_INSTANCES={})
+
+ with pytest.raises(ValidationError) as exc_after:
ApplicationSettings.create_from_envs()
+ assert exc_before.value.errors() == exc_after.value.errors()
+
+
+def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_without_main_field_env_var( # noqa: N802
+ app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch
+):
+ monkeypatch.delenv("AUTOSCALING_EC2_INSTANCES")
+ monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}")
+
+    # with AUTOSCALING_EC2_INSTANCES removed, the optional field resolves to None
+ settings = ApplicationSettings.create_from_envs()
+ assert settings.AUTOSCALING_EC2_INSTANCES is None
+
def test_invalid_instance_names(
app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, faker: Faker
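Together the three tests pin down the nested-settings behavior: setting AUTOSCALING_EC2_INSTANCES={} forces construction of EC2InstancesSettings, which then reads and validates its own env vars, while deleting the env var leaves the optional field as None. A pydantic-v1-style sketch of that mechanism, with stand-in class names rather than the real settings:

    import os
    from pydantic import BaseSettings, validator

    class ChildSettings(BaseSettings):  # stand-in for EC2InstancesSettings
        EC2_INSTANCES_ALLOWED_TYPES: dict

        @validator("EC2_INSTANCES_ALLOWED_TYPES")
        @classmethod
        def _not_empty(cls, v: dict) -> dict:
            if not v:
                msg = "cannot be empty"
                raise ValueError(msg)
            return v

    class MainSettings(BaseSettings):  # stand-in for ApplicationSettings
        AUTOSCALING_EC2_INSTANCES: ChildSettings | None = None

    os.environ["EC2_INSTANCES_ALLOWED_TYPES"] = "{}"

    os.environ["AUTOSCALING_EC2_INSTANCES"] = "{}"
    # MainSettings() now instantiates ChildSettings, which rejects the empty dict:
    # ValidationError with loc ("AUTOSCALING_EC2_INSTANCES", "EC2_INSTANCES_ALLOWED_TYPES")

    del os.environ["AUTOSCALING_EC2_INSTANCES"]
    assert MainSettings().AUTOSCALING_EC2_INSTANCES is None  # optional field stays None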
From f7e6d5b811129c7749673f2efada9da8163be5bf Mon Sep 17 00:00:00 2001
From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Tue, 12 Nov 2024 15:30:27 +0100
Subject: [PATCH 07/17] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor=20listing?=
=?UTF-8?q?=20folders=20(#6703)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
api/specs/web-server/_folders.py | 21 +
.../src/models_library/folders.py | 8 +
.../simcore_postgres_database/utils_sql.py | 6 +
.../utils_workspaces_sql.py | 30 +
.../db_access_layer.py | 9 +-
.../api/v0/openapi.yaml | 64 ++
.../folders/_folders_api.py | 711 +++++++++---------
.../folders/_folders_db.py | 148 +++-
.../folders/_folders_handlers.py | 41 +
.../folders/_models.py | 35 +-
.../workspaces/_workspaces_db.py | 38 +-
.../04/folders/test_folders__full_search.py | 123 +++
...st_workspaces__list_folders_full_search.py | 65 ++
13 files changed, 879 insertions(+), 420 deletions(-)
create mode 100644 packages/postgres-database/src/simcore_postgres_database/utils_sql.py
create mode 100644 packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py
create mode 100644 services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
create mode 100644 services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py
diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py
index 90f1ad3beb1..25eecea5cd0 100644
--- a/api/specs/web-server/_folders.py
+++ b/api/specs/web-server/_folders.py
@@ -63,6 +63,27 @@ async def list_folders(
...
+@router.get(
+ "/folders:search",
+ response_model=Envelope[list[FolderGet]],
+)
+async def list_folders_full_search(
+ params: Annotated[PageQueryParameters, Depends()],
+ order_by: Annotated[
+ Json,
+ Query(
+ description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
+ example='{"field": "name", "direction": "desc"}',
+ ),
+ ] = '{"field": "modified_at", "direction": "desc"}',
+ filters: Annotated[
+ Json | None,
+ Query(description=FolderFilters.schema_json(indent=1)),
+ ] = None,
+):
+ ...
+
+
@router.get(
"/folders/{folder_id}",
response_model=Envelope[FolderGet],
diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py
index 485e74b86c8..1d2b9622943 100644
--- a/packages/models-library/src/models_library/folders.py
+++ b/packages/models-library/src/models_library/folders.py
@@ -4,6 +4,7 @@
from pydantic import BaseModel, Field, PositiveInt, validator
+from .access_rights import AccessRights
from .users import GroupID, UserID
from .utils.enums import StrAutoEnum
from .workspaces import WorkspaceID
@@ -66,3 +67,10 @@ class FolderDB(BaseModel):
class Config:
orm_mode = True
+
+
+class UserFolderAccessRightsDB(FolderDB):
+ my_access_rights: AccessRights
+
+ class Config:
+ orm_mode = True
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py
new file mode 100644
index 00000000000..e3d4e1438af
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py
@@ -0,0 +1,6 @@
+def assemble_array_groups(user_group_ids: list[int]) -> str:
+ return (
+ "array[]::text[]"
+ if len(user_group_ids) == 0
+ else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]"""
+ )
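assemble_array_groups renders a list of group ids as a Postgres text[] literal for use inside raw-SQL fragments; the empty case needs the explicit array[]::text[] cast because a bare array[] has no element type. For example:

    assert assemble_array_groups([]) == "array[]::text[]"
    assert assemble_array_groups([3, 7]) == "array['3', '7']"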
diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py
new file mode 100644
index 00000000000..05b24d969bd
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py
@@ -0,0 +1,30 @@
+from simcore_postgres_database.models.groups import user_to_groups
+from simcore_postgres_database.models.workspaces_access_rights import (
+ workspaces_access_rights,
+)
+from sqlalchemy import func
+from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
+from sqlalchemy.sql import Subquery, select
+
+
+def create_my_workspace_access_rights_subquery(user_id: int) -> Subquery:
+ return (
+ select(
+ workspaces_access_rights.c.workspace_id,
+ func.json_build_object(
+ "read",
+ func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN),
+ "write",
+ func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN),
+ "delete",
+ func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN),
+ ).label("my_access_rights"),
+ )
+ .select_from(
+ workspaces_access_rights.join(
+ user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid
+ )
+ )
+ .where(user_to_groups.c.uid == user_id)
+ .group_by(workspaces_access_rights.c.workspace_id)
+ ).subquery("my_workspace_access_rights_subquery")
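
A sketch of how this subquery is meant to be consumed (it is wired up this way further down in this patch): join it on workspace_id so each selected row carries a my_access_rights JSON object. The workspaces table and the user id here are stand-ins:

from simcore_postgres_database.models.workspaces import workspaces
from simcore_postgres_database.utils_workspaces_sql import (
    create_my_workspace_access_rights_subquery,
)
from sqlalchemy.sql import select

access_rights_sq = create_my_workspace_access_rights_subquery(user_id=42)
list_query = select(
    workspaces.c.workspace_id,
    access_rights_sq.c.my_access_rights,
).select_from(
    workspaces.join(
        access_rights_sq,
        workspaces.c.workspace_id == access_rights_sq.c.workspace_id,
    )
)
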
diff --git a/services/storage/src/simcore_service_storage/db_access_layer.py b/services/storage/src/simcore_service_storage/db_access_layer.py
index 19452862de5..b77504088f1 100644
--- a/services/storage/src/simcore_service_storage/db_access_layer.py
+++ b/services/storage/src/simcore_service_storage/db_access_layer.py
@@ -51,6 +51,7 @@
workspaces_access_rights,
)
from simcore_postgres_database.storage_models import file_meta_data, user_to_groups
+from simcore_postgres_database.utils_sql import assemble_array_groups
logger = logging.getLogger(__name__)
@@ -117,14 +118,6 @@ def _aggregate_access_rights(
return AccessRights.none()
-def assemble_array_groups(user_group_ids: list[GroupID]) -> str:
- return (
- "array[]::text[]"
- if len(user_group_ids) == 0
- else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]"""
- )
-
-
access_rights_subquery = (
sa.select(
project_to_groups.c.project_uuid,
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index dafb3f8fb08..40d0841c65a 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2690,6 +2690,70 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/Envelope_FolderGet_'
+ /v0/folders:search:
+ get:
+ tags:
+ - folders
+ summary: List Folders Full Search
+ operationId: list_folders_full_search
+ parameters:
+ - description: Order by field (modified_at|name|description) and direction (asc|desc).
+ The default sorting order is ascending.
+ required: false
+ schema:
+ title: Order By
+ description: Order by field (modified_at|name|description) and direction
+ (asc|desc). The default sorting order is ascending.
+ default: '{"field": "modified_at", "direction": "desc"}'
+ example: '{"field": "name", "direction": "desc"}'
+ name: order_by
+ in: query
+ - description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\
+ : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\
+ \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\
+ \ \"type\": \"boolean\"\n }\n }\n}"
+ required: false
+ schema:
+ title: Filters
+ type: string
+ description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\
+ \ as JSON. Each available filter can have its own logic (should be well\
+ \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\
+ ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\
+ title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\
+ \ false to list non-trashed (default), None to list all\",\n \"default\"\
+ : false,\n \"type\": \"boolean\"\n }\n }\n}"
+ format: json-string
+ name: filters
+ in: query
+ - required: false
+ schema:
+ title: Limit
+ exclusiveMaximum: true
+ minimum: 1
+ type: integer
+ default: 20
+ maximum: 50
+ name: limit
+ in: query
+ - required: false
+ schema:
+ title: Offset
+ minimum: 0
+ type: integer
+ default: 0
+ name: offset
+ in: query
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Envelope_list_models_library.api_schemas_webserver.folders_v2.FolderGet__'
/v0/folders/{folder_id}:
get:
tags:
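
The filters parameter is the JSON-encoded FolderFilters schema spelled out above, and its trashed flag is tri-state. A short sketch of the three possible encodings, following the schema's own description:

import json

only_trashed = json.dumps({"trashed": True})  # explicitly trashed folders only
non_trashed = json.dumps({"trashed": False})  # non-trashed folders (the default)
no_filter = json.dumps({"trashed": None})  # list all, trashed or not
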
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
index 043527d2def..a791a65c715 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
@@ -1,341 +1,370 @@
-# pylint: disable=unused-argument
-
-import logging
-
-from aiohttp import web
-from models_library.access_rights import AccessRights
-from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage
-from models_library.folders import FolderID
-from models_library.products import ProductName
-from models_library.projects import ProjectID
-from models_library.rest_ordering import OrderBy
-from models_library.users import UserID
-from models_library.workspaces import WorkspaceID
-from pydantic import NonNegativeInt
-from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
-from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
-from servicelib.utils import fire_and_forget_task
-
-from ..folders.errors import FolderValueNotPermittedError
-from ..projects.projects_api import submit_delete_project_task
-from ..users.api import get_user
-from ..workspaces.api import check_user_workspace_access
-from ..workspaces.errors import (
- WorkspaceAccessForbiddenError,
- WorkspaceFolderInconsistencyError,
-)
-from . import _folders_db as folders_db
-
-_logger = logging.getLogger(__name__)
-
-
-async def create_folder(
- app: web.Application,
- user_id: UserID,
- name: str,
- parent_folder_id: FolderID | None,
- product_name: ProductName,
- workspace_id: WorkspaceID | None,
-) -> FolderGet:
- user = await get_user(app, user_id=user_id)
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=workspace_id,
- product_name=product_name,
- permission="write",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- # Check parent_folder_id lives in the workspace
- if parent_folder_id:
- parent_folder_db = await folders_db.get(
- app, folder_id=parent_folder_id, product_name=product_name
- )
- if parent_folder_db.workspace_id != workspace_id:
- raise WorkspaceFolderInconsistencyError(
- folder_id=parent_folder_id, workspace_id=workspace_id
- )
-
- if parent_folder_id:
- # Check user has access to the parent folder
- parent_folder_db = await folders_db.get_for_user_or_workspace(
- app,
- folder_id=parent_folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
- if workspace_id and parent_folder_db.workspace_id != workspace_id:
- # Check parent folder id exists inside the same workspace
- raise WorkspaceAccessForbiddenError(
- reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}."
- )
-
- folder_db = await folders_db.create(
- app,
- product_name=product_name,
- created_by_gid=user["primary_gid"],
- folder_name=name,
- parent_folder_id=parent_folder_id,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- trashed_at=folder_db.trashed_at,
- owner=folder_db.created_by_gid,
- workspace_id=workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def get_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- product_name: ProductName,
-) -> FolderGet:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if folder_db.workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="read",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- folder_db = await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- trashed_at=folder_db.trashed_at,
- owner=folder_db.created_by_gid,
- workspace_id=folder_db.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def list_folders(
- app: web.Application,
- user_id: UserID,
- product_name: ProductName,
- folder_id: FolderID | None,
- workspace_id: WorkspaceID | None,
- trashed: bool | None,
- offset: NonNegativeInt,
- limit: int,
- order_by: OrderBy,
-) -> FolderGetPage:
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
-
- if workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=workspace_id,
- product_name=product_name,
- permission="read",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- if folder_id:
- # Check user access to folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- )
-
- total_count, folders = await folders_db.list_(
- app,
- content_of_folder_id=folder_id,
- user_id=user_id if workspace_is_private else None,
- workspace_id=workspace_id,
- product_name=product_name,
- trashed=trashed,
- offset=offset,
- limit=limit,
- order_by=order_by,
- )
- return FolderGetPage(
- items=[
- FolderGet(
- folder_id=folder.folder_id,
- parent_folder_id=folder.parent_folder_id,
- name=folder.name,
- created_at=folder.created,
- modified_at=folder.modified,
- trashed_at=folder.trashed_at,
- owner=folder.created_by_gid,
- workspace_id=folder.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
- for folder in folders
- ],
- total=total_count,
- )
-
-
-async def update_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- *,
- name: str,
- parent_folder_id: FolderID | None,
- product_name: ProductName,
-) -> FolderGet:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
- if folder_db.workspace_id:
- user_workspace_access_rights = await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="write",
- )
- workspace_is_private = False
- user_folder_access_rights = user_workspace_access_rights.my_access_rights
-
- # Check user has access to the folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
-
- if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None:
- # Check user has access to the parent folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=parent_folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
- # Do not allow to move to a child folder id
- _child_folders = await folders_db.get_folders_recursively(
- app, folder_id=folder_id, product_name=product_name
- )
- if parent_folder_id in _child_folders:
- raise FolderValueNotPermittedError(
- reason="Parent folder id should not be one of children"
- )
-
- folder_db = await folders_db.update(
- app,
- folders_id_or_ids=folder_id,
- name=name,
- parent_folder_id=parent_folder_id,
- product_name=product_name,
- )
- return FolderGet(
- folder_id=folder_db.folder_id,
- parent_folder_id=folder_db.parent_folder_id,
- name=folder_db.name,
- created_at=folder_db.created,
- modified_at=folder_db.modified,
- trashed_at=folder_db.trashed_at,
- owner=folder_db.created_by_gid,
- workspace_id=folder_db.workspace_id,
- my_access_rights=user_folder_access_rights,
- )
-
-
-async def delete_folder(
- app: web.Application,
- user_id: UserID,
- folder_id: FolderID,
- product_name: ProductName,
-) -> None:
- folder_db = await folders_db.get(
- app, folder_id=folder_id, product_name=product_name
- )
-
- workspace_is_private = True
- if folder_db.workspace_id:
- await check_user_workspace_access(
- app,
- user_id=user_id,
- workspace_id=folder_db.workspace_id,
- product_name=product_name,
- permission="delete",
- )
- workspace_is_private = False
-
- # Check user has access to the folder
- await folders_db.get_for_user_or_workspace(
- app,
- folder_id=folder_id,
- product_name=product_name,
- user_id=user_id if workspace_is_private else None,
- workspace_id=folder_db.workspace_id,
- )
-
- # 1. Delete folder content
- # 1.1 Delete all child projects that I am an owner
- project_id_list: list[
- ProjectID
- ] = await folders_db.get_projects_recursively_only_if_user_is_owner(
- app,
- folder_id=folder_id,
- private_workspace_user_id_or_none=user_id if workspace_is_private else None,
- user_id=user_id,
- product_name=product_name,
- )
-
- # fire and forget task for project deletion
- for project_id in project_id_list:
- fire_and_forget_task(
- submit_delete_project_task(
- app,
- project_uuid=project_id,
- user_id=user_id,
- simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE,
- ),
- task_suffix_name=f"delete_project_task_{project_id}",
- fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY],
- )
-
- # 1.2 Delete all child folders
- await folders_db.delete_recursively(
- app, folder_id=folder_id, product_name=product_name
- )
+# pylint: disable=unused-argument
+
+import logging
+
+from aiohttp import web
+from models_library.access_rights import AccessRights
+from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage
+from models_library.folders import FolderID, FolderQuery, FolderScope
+from models_library.products import ProductName
+from models_library.projects import ProjectID
+from models_library.rest_ordering import OrderBy
+from models_library.users import UserID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
+from pydantic import NonNegativeInt
+from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
+from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
+from servicelib.utils import fire_and_forget_task
+
+from ..folders.errors import FolderValueNotPermittedError
+from ..projects.projects_api import submit_delete_project_task
+from ..users.api import get_user
+from ..workspaces.api import check_user_workspace_access
+from ..workspaces.errors import (
+ WorkspaceAccessForbiddenError,
+ WorkspaceFolderInconsistencyError,
+)
+from . import _folders_db as folders_db
+
+_logger = logging.getLogger(__name__)
+
+
+async def create_folder(
+ app: web.Application,
+ user_id: UserID,
+ name: str,
+ parent_folder_id: FolderID | None,
+ product_name: ProductName,
+ workspace_id: WorkspaceID | None,
+) -> FolderGet:
+ user = await get_user(app, user_id=user_id)
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=workspace_id,
+ product_name=product_name,
+ permission="write",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ # Check parent_folder_id lives in the workspace
+ if parent_folder_id:
+ parent_folder_db = await folders_db.get(
+ app, folder_id=parent_folder_id, product_name=product_name
+ )
+ if parent_folder_db.workspace_id != workspace_id:
+ raise WorkspaceFolderInconsistencyError(
+ folder_id=parent_folder_id, workspace_id=workspace_id
+ )
+
+ if parent_folder_id:
+ # Check user has access to the parent folder
+ parent_folder_db = await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=parent_folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=workspace_id,
+ )
+ if workspace_id and parent_folder_db.workspace_id != workspace_id:
+            # Ensure the parent folder lives inside the same workspace
+            raise WorkspaceAccessForbiddenError(
+                reason=f"Folder {parent_folder_id} does not exist in workspace {workspace_id}."
+ )
+
+ folder_db = await folders_db.create(
+ app,
+ product_name=product_name,
+ created_by_gid=user["primary_gid"],
+ folder_name=name,
+ parent_folder_id=parent_folder_id,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=workspace_id,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def get_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ product_name: ProductName,
+) -> FolderGet:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if folder_db.workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="read",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ folder_db = await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=folder_db.workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def list_folders(
+ app: web.Application,
+ user_id: UserID,
+ product_name: ProductName,
+ folder_id: FolderID | None,
+ workspace_id: WorkspaceID | None,
+ trashed: bool | None,
+ offset: NonNegativeInt,
+ limit: int,
+ order_by: OrderBy,
+) -> FolderGetPage:
+ # NOTE: Folder access rights for listing are checked within the listing DB function.
+
+ total_count, folders = await folders_db.list_(
+ app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_query=(
+ FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id)
+ if folder_id
+ else FolderQuery(folder_scope=FolderScope.ROOT)
+ ),
+ workspace_query=(
+ WorkspaceQuery(
+ workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id
+ )
+ if workspace_id
+ else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)
+ ),
+ filter_trashed=trashed,
+ offset=offset,
+ limit=limit,
+ order_by=order_by,
+ )
+ return FolderGetPage(
+ items=[
+ FolderGet(
+ folder_id=folder.folder_id,
+ parent_folder_id=folder.parent_folder_id,
+ name=folder.name,
+ created_at=folder.created,
+ modified_at=folder.modified,
+ trashed_at=folder.trashed_at,
+ owner=folder.created_by_gid,
+ workspace_id=folder.workspace_id,
+ my_access_rights=folder.my_access_rights,
+ )
+ for folder in folders
+ ],
+ total=total_count,
+ )
+
+
+async def list_folders_full_search(
+ app: web.Application,
+ user_id: UserID,
+ product_name: ProductName,
+ trashed: bool | None,
+ offset: NonNegativeInt,
+ limit: int,
+ order_by: OrderBy,
+) -> FolderGetPage:
+ # NOTE: Folder access rights for listing are checked within the listing DB function.
+
+ total_count, folders = await folders_db.list_(
+ app,
+ product_name=product_name,
+ user_id=user_id,
+ folder_query=FolderQuery(folder_scope=FolderScope.ALL),
+ workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
+ filter_trashed=trashed,
+ offset=offset,
+ limit=limit,
+ order_by=order_by,
+ )
+ return FolderGetPage(
+ items=[
+ FolderGet(
+ folder_id=folder.folder_id,
+ parent_folder_id=folder.parent_folder_id,
+ name=folder.name,
+ created_at=folder.created,
+ modified_at=folder.modified,
+ trashed_at=folder.trashed_at,
+ owner=folder.created_by_gid,
+ workspace_id=folder.workspace_id,
+ my_access_rights=folder.my_access_rights,
+ )
+ for folder in folders
+ ],
+ total=total_count,
+ )
+
+
+async def update_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ *,
+ name: str,
+ parent_folder_id: FolderID | None,
+ product_name: ProductName,
+) -> FolderGet:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ user_folder_access_rights = AccessRights(read=True, write=True, delete=True)
+ if folder_db.workspace_id:
+ user_workspace_access_rights = await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="write",
+ )
+ workspace_is_private = False
+ user_folder_access_rights = user_workspace_access_rights.my_access_rights
+
+ # Check user has access to the folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+
+ if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None:
+ # Check user has access to the parent folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=parent_folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+        # Do not allow moving the folder into one of its own children
+ _child_folders = await folders_db.get_folders_recursively(
+ app, folder_id=folder_id, product_name=product_name
+ )
+ if parent_folder_id in _child_folders:
+ raise FolderValueNotPermittedError(
+ reason="Parent folder id should not be one of children"
+ )
+
+ folder_db = await folders_db.update(
+ app,
+ folders_id_or_ids=folder_id,
+ name=name,
+ parent_folder_id=parent_folder_id,
+ product_name=product_name,
+ )
+ return FolderGet(
+ folder_id=folder_db.folder_id,
+ parent_folder_id=folder_db.parent_folder_id,
+ name=folder_db.name,
+ created_at=folder_db.created,
+ modified_at=folder_db.modified,
+ trashed_at=folder_db.trashed_at,
+ owner=folder_db.created_by_gid,
+ workspace_id=folder_db.workspace_id,
+ my_access_rights=user_folder_access_rights,
+ )
+
+
+async def delete_folder(
+ app: web.Application,
+ user_id: UserID,
+ folder_id: FolderID,
+ product_name: ProductName,
+) -> None:
+ folder_db = await folders_db.get(
+ app, folder_id=folder_id, product_name=product_name
+ )
+
+ workspace_is_private = True
+ if folder_db.workspace_id:
+ await check_user_workspace_access(
+ app,
+ user_id=user_id,
+ workspace_id=folder_db.workspace_id,
+ product_name=product_name,
+ permission="delete",
+ )
+ workspace_is_private = False
+
+ # Check user has access to the folder
+ await folders_db.get_for_user_or_workspace(
+ app,
+ folder_id=folder_id,
+ product_name=product_name,
+ user_id=user_id if workspace_is_private else None,
+ workspace_id=folder_db.workspace_id,
+ )
+
+ # 1. Delete folder content
+    # 1.1 Delete all child projects that the user owns
+ project_id_list: list[
+ ProjectID
+ ] = await folders_db.get_projects_recursively_only_if_user_is_owner(
+ app,
+ folder_id=folder_id,
+ private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+ user_id=user_id,
+ product_name=product_name,
+ )
+
+    # fire-and-forget tasks for project deletion
+ for project_id in project_id_list:
+ fire_and_forget_task(
+ submit_delete_project_task(
+ app,
+ project_uuid=project_id,
+ user_id=user_id,
+ simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE,
+ ),
+ task_suffix_name=f"delete_project_task_{project_id}",
+ fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY],
+ )
+
+ # 1.2 Delete all child folders
+ await folders_db.delete_recursively(
+ app, folder_id=folder_id, product_name=product_name
+ )
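
The two listing entry points differ only in how they scope the underlying query: list_folders pins the search to one folder level and one workspace, whereas list_folders_full_search widens both scopes to ALL. A condensed sketch of the query objects involved, using the same constructors as the code above:

from models_library.folders import FolderQuery, FolderScope
from models_library.workspaces import WorkspaceQuery, WorkspaceScope

# list_folders: one level of one workspace (here: root of the private workspace)
folder_query = FolderQuery(folder_scope=FolderScope.ROOT)
workspace_query = WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)

# list_folders_full_search: every folder the user can reach
search_folder_query = FolderQuery(folder_scope=FolderScope.ALL)
search_workspace_query = WorkspaceQuery(workspace_scope=WorkspaceScope.ALL)
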
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index 561bcb64c9e..0af9d36dadf 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -8,13 +8,20 @@
from datetime import datetime
from typing import Any, Final, cast
+import sqlalchemy as sa
from aiohttp import web
-from models_library.folders import FolderDB, FolderID
+from models_library.folders import (
+ FolderDB,
+ FolderID,
+ FolderQuery,
+ FolderScope,
+ UserFolderAccessRightsDB,
+)
from models_library.products import ProductName
from models_library.projects import ProjectID
from models_library.rest_ordering import OrderBy, OrderDirection
from models_library.users import GroupID, UserID
-from models_library.workspaces import WorkspaceID
+from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope
from pydantic import NonNegativeInt
from simcore_postgres_database.models.folders_v2 import folders_v2
from simcore_postgres_database.models.projects import projects
@@ -23,10 +30,13 @@
pass_or_acquire_connection,
transaction_context,
)
+from simcore_postgres_database.utils_workspaces_sql import (
+ create_my_workspace_access_rights_subquery,
+)
from sqlalchemy import func
from sqlalchemy.ext.asyncio import AsyncConnection
from sqlalchemy.orm import aliased
-from sqlalchemy.sql import asc, desc, select
+from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, asc, desc, select
from ..db.plugin import get_asyncpg_engine
from .errors import FolderAccessForbiddenError, FolderNotFoundError
@@ -92,68 +102,142 @@ async def create(
return FolderDB.from_orm(row)
-async def list_(
+async def list_( # pylint: disable=too-many-arguments,too-many-branches
app: web.Application,
connection: AsyncConnection | None = None,
*,
- content_of_folder_id: FolderID | None,
- user_id: UserID | None,
- workspace_id: WorkspaceID | None,
product_name: ProductName,
- trashed: bool | None,
+ user_id: UserID,
+ # hierarchy filters
+ folder_query: FolderQuery,
+ workspace_query: WorkspaceQuery,
+ # attribute filters
+ filter_trashed: bool | None,
+ # pagination
offset: NonNegativeInt,
limit: int,
+ # order
order_by: OrderBy,
-) -> tuple[int, list[FolderDB]]:
+) -> tuple[int, list[UserFolderAccessRightsDB]]:
"""
- content_of_folder_id - Used to filter in which folder we want to list folders. None means root folder.
+    folder_query - Selects in which folder (or folder scope) the listing happens.
trashed - If set to true, it returns folders **explicitly** trashed, if false then non-trashed folders.
"""
- assert not ( # nosec
- user_id is not None and workspace_id is not None
- ), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one."
- base_query = (
- select(*_SELECTION_ARGS)
- .select_from(folders_v2)
- .where(
- (folders_v2.c.product_name == product_name)
- & (folders_v2.c.parent_folder_id == content_of_folder_id)
- )
+ workspace_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
)
- if user_id:
- base_query = base_query.where(folders_v2.c.user_id == user_id)
+ if workspace_query.workspace_scope is not WorkspaceScope.SHARED:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.PRIVATE,
+ WorkspaceScope.ALL,
+ )
+
+ private_workspace_query = (
+ select(
+ *_SELECTION_ARGS,
+ func.json_build_object(
+ "read",
+ sa.text("true"),
+ "write",
+ sa.text("true"),
+ "delete",
+ sa.text("true"),
+ ).label("my_access_rights"),
+ )
+ .select_from(folders_v2)
+ .where(
+ (folders_v2.c.product_name == product_name)
+ & (folders_v2.c.user_id == user_id)
+ )
+ )
+ else:
+ private_workspace_query = None
+
+ if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE:
+ assert workspace_query.workspace_scope in ( # nosec
+ WorkspaceScope.SHARED,
+ WorkspaceScope.ALL,
+ )
+
+ shared_workspace_query = (
+ select(
+ *_SELECTION_ARGS, workspace_access_rights_subquery.c.my_access_rights
+ )
+ .select_from(
+ folders_v2.join(
+ workspace_access_rights_subquery,
+ folders_v2.c.workspace_id
+ == workspace_access_rights_subquery.c.workspace_id,
+ )
+ )
+ .where(
+ (folders_v2.c.product_name == product_name)
+ & (folders_v2.c.user_id.is_(None))
+ )
+ )
else:
- assert workspace_id # nosec
- base_query = base_query.where(folders_v2.c.workspace_id == workspace_id)
+ shared_workspace_query = None
+
+ attributes_filters: list[ColumnElement] = []
- if trashed is not None:
- base_query = base_query.where(
+ if filter_trashed is not None:
+ attributes_filters.append(
(
(folders_v2.c.trashed_at.is_not(None))
& (folders_v2.c.trashed_explicitly.is_(True))
)
- if trashed
+ if filter_trashed
else folders_v2.c.trashed_at.is_(None)
)
+ if folder_query.folder_scope is not FolderScope.ALL:
+ if folder_query.folder_scope == FolderScope.SPECIFIC:
+ attributes_filters.append(
+ folders_v2.c.parent_folder_id == folder_query.folder_id
+ )
+ else:
+ assert folder_query.folder_scope == FolderScope.ROOT # nosec
+ attributes_filters.append(folders_v2.c.parent_folder_id.is_(None))
+
+ ###
+ # Combined
+ ###
+
+ combined_query: CompoundSelect | Select | None = None
+ if private_workspace_query is not None and shared_workspace_query is not None:
+ combined_query = sa.union_all(
+ private_workspace_query.where(sa.and_(*attributes_filters)),
+ shared_workspace_query.where(sa.and_(*attributes_filters)),
+ )
+ elif private_workspace_query is not None:
+ combined_query = private_workspace_query.where(sa.and_(*attributes_filters))
+ elif shared_workspace_query is not None:
+ combined_query = shared_workspace_query.where(sa.and_(*attributes_filters))
+
+ if combined_query is None:
+ msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}"
+ raise ValueError(msg)
-    # Select total count from base_query
+    # Select total count from the combined query
- subquery = base_query.subquery()
- count_query = select(func.count()).select_from(subquery)
+ count_query = select(func.count()).select_from(combined_query.subquery())
# Ordering and pagination
if order_by.direction == OrderDirection.ASC:
- list_query = base_query.order_by(asc(getattr(folders_v2.c, order_by.field)))
+ list_query = combined_query.order_by(asc(getattr(folders_v2.c, order_by.field)))
else:
- list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field)))
+ list_query = combined_query.order_by(
+ desc(getattr(folders_v2.c, order_by.field))
+ )
list_query = list_query.offset(offset).limit(limit)
async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn:
total_count = await conn.scalar(count_query)
result = await conn.stream(list_query)
- folders: list[FolderDB] = [FolderDB.from_orm(row) async for row in result]
+ folders: list[UserFolderAccessRightsDB] = [
+ UserFolderAccessRightsDB.from_orm(row) async for row in result
+ ]
return cast(int, total_count), folders
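
Which branches of the union actually run is decided solely by workspace_query.workspace_scope; a comment-only summary of the dispatch implemented above, together with the scope value the full search passes in:

from models_library.workspaces import WorkspaceQuery, WorkspaceScope

# PRIVATE -> private-workspace SELECT only (folders_v2.user_id == user_id)
# SHARED  -> shared-workspace SELECT only (join on the access-rights subquery)
# ALL     -> union_all of both, with the same attribute filters applied to each
full_search_scope = WorkspaceQuery(workspace_scope=WorkspaceScope.ALL)
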
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
index e4fffd82fc6..7050205bd7d 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
@@ -28,6 +28,7 @@
from ._exceptions_handlers import handle_plugin_requests_exceptions
from ._models import (
FolderFilters,
+ FolderListFullSearchWithJsonStrQueryParams,
FolderListWithJsonStrQueryParams,
FoldersPathParams,
FoldersRequestContext,
@@ -99,6 +100,46 @@ async def list_folders(request: web.Request):
)
+@routes.get(f"/{VTAG}/folders:search", name="list_folders_full_search")
+@login_required
+@permission_required("folder.read")
+@handle_plugin_requests_exceptions
+async def list_folders_full_search(request: web.Request):
+ req_ctx = FoldersRequestContext.parse_obj(request)
+ query_params: FolderListFullSearchWithJsonStrQueryParams = (
+ parse_request_query_parameters_as(
+ FolderListFullSearchWithJsonStrQueryParams, request
+ )
+ )
+
+ if not query_params.filters:
+ query_params.filters = FolderFilters()
+
+ folders: FolderGetPage = await _folders_api.list_folders_full_search(
+ app=request.app,
+ user_id=req_ctx.user_id,
+ product_name=req_ctx.product_name,
+ trashed=query_params.filters.trashed,
+ offset=query_params.offset,
+ limit=query_params.limit,
+ order_by=parse_obj_as(OrderBy, query_params.order_by),
+ )
+
+ page = Page[FolderGet].parse_obj(
+ paginate_data(
+ chunk=folders.items,
+ request_url=request.url,
+ total=folders.total,
+ limit=query_params.limit,
+ offset=query_params.offset,
+ )
+ )
+ return web.Response(
+ text=page.json(**RESPONSE_MODEL_POLICY),
+ content_type=MIMETYPE_APPLICATION_JSON,
+ )
+
+
@routes.get(f"/{VTAG}/folders/{{folder_id}}", name="get_folder")
@login_required
@permission_required("folder.read")
diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py
index fb337b5b199..5e48f46fa37 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_models.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_models.py
@@ -33,9 +33,7 @@ class FolderFilters(Filters):
)
-class FolderListWithJsonStrQueryParams(
- PageQueryParameters, FiltersQueryParameters[FolderFilters]
-):
+class FolderListSortParams(BaseModel):
# pylint: disable=unsubscriptable-object
order_by: Json[OrderBy] = Field(
default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC),
@@ -43,14 +41,6 @@ class FolderListWithJsonStrQueryParams(
example='{"field": "name", "direction": "desc"}',
alias="order_by",
)
- folder_id: FolderID | None = Field(
- default=None,
- description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).",
- )
- workspace_id: WorkspaceID | None = Field(
- default=None,
- description="List folders in specific workspace. By default, list in the user private workspace",
- )
@validator("order_by", check_fields=False)
@classmethod
@@ -69,6 +59,22 @@ def _validate_order_by_field(cls, v):
class Config:
extra = Extra.forbid
+
+class FolderListWithJsonStrQueryParams(
+ PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters]
+):
+ folder_id: FolderID | None = Field(
+ default=None,
+ description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).",
+ )
+ workspace_id: WorkspaceID | None = Field(
+ default=None,
+ description="List folders in specific workspace. By default, list in the user private workspace",
+ )
+
+ class Config:
+ extra = Extra.forbid
+
# validators
_null_or_none_str_to_none_validator = validator(
"folder_id", allow_reuse=True, pre=True
@@ -79,6 +85,13 @@ class Config:
)(null_or_none_str_to_none_validator)
+class FolderListFullSearchWithJsonStrQueryParams(
+ PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters]
+):
+ class Config:
+ extra = Extra.forbid
+
+
class RemoveQueryParams(BaseModel):
force: bool = Field(
default=False, description="Force removal (even if resource is active)"
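
A sketch of what the new full-search params model accepts once the mixins are composed; parse_obj is the plain pydantic-v1 entry point, whereas the handler above goes through parse_request_query_parameters_as. The literal values are illustrative:

from simcore_service_webserver.folders._models import (
    FolderListFullSearchWithJsonStrQueryParams,
)

params = FolderListFullSearchWithJsonStrQueryParams.parse_obj(
    {
        "order_by": '{"field": "name", "direction": "asc"}',
        "filters": '{"trashed": true}',
        "limit": 20,
        "offset": 0,
    }
)
assert params.order_by.field == "name"
assert params.filters is not None and params.filters.trashed is True
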
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
index a959843a969..fa0ab9dbab6 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py
@@ -17,7 +17,6 @@
WorkspaceID,
)
from pydantic import NonNegativeInt
-from simcore_postgres_database.models.groups import user_to_groups
from simcore_postgres_database.models.workspaces import workspaces
from simcore_postgres_database.models.workspaces_access_rights import (
workspaces_access_rights,
@@ -26,10 +25,12 @@
pass_or_acquire_connection,
transaction_context,
)
+from simcore_postgres_database.utils_workspaces_sql import (
+ create_my_workspace_access_rights_subquery,
+)
from sqlalchemy import asc, desc, func
-from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER
from sqlalchemy.ext.asyncio import AsyncConnection
-from sqlalchemy.sql import Subquery, select
+from sqlalchemy.sql import select
from ..db.plugin import get_asyncpg_engine
from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError
@@ -98,29 +99,6 @@ async def create_workspace(
).subquery("access_rights_subquery")
-def _create_my_access_rights_subquery(user_id: UserID) -> Subquery:
- return (
- select(
- workspaces_access_rights.c.workspace_id,
- func.json_build_object(
- "read",
- func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN),
- "write",
- func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN),
- "delete",
- func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN),
- ).label("my_access_rights"),
- )
- .select_from(
- workspaces_access_rights.join(
- user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid
- )
- )
- .where(user_to_groups.c.uid == user_id)
- .group_by(workspaces_access_rights.c.workspace_id)
- ).subquery("my_access_rights_subquery")
-
-
async def list_workspaces_for_user(
app: web.Application,
connection: AsyncConnection | None = None,
@@ -131,7 +109,9 @@ async def list_workspaces_for_user(
limit: NonNegativeInt,
order_by: OrderBy,
) -> tuple[int, list[UserWorkspaceAccessRightsDB]]:
- my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id)
+ my_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
+ )
base_query = (
select(
@@ -175,7 +155,9 @@ async def get_workspace_for_user(
workspace_id: WorkspaceID,
product_name: ProductName,
) -> UserWorkspaceAccessRightsDB:
- my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id)
+ my_access_rights_subquery = create_my_workspace_access_rights_subquery(
+ user_id=user_id
+ )
base_query = (
select(
diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
new file mode 100644
index 00000000000..b9da926543e
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
@@ -0,0 +1,123 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+
+from http import HTTPStatus
+
+import pytest
+from aiohttp.test_utils import TestClient
+from models_library.api_schemas_webserver.folders_v2 import FolderGet
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
+from pytest_simcore.helpers.webserver_parametrizations import (
+ ExpectedResponse,
+ standard_role_response,
+)
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+from simcore_service_webserver.projects.models import ProjectDict
+
+
+@pytest.mark.parametrize(*standard_role_response(), ids=str)
+async def test_folders_user_role_permissions(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: ExpectedResponse,
+):
+ assert client.app
+
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ await assert_status(resp, expected.ok)
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_folders_full_search(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: HTTPStatus,
+):
+ assert client.app
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
+ root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+    # create a subfolder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ f"{url}",
+ json={
+ "name": "My subfolder",
+ "parentFolderId": root_folder["folderId"],
+ },
+ )
+ subfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 2
+
+ # create a sub sub folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ f"{url}",
+ json={
+ "name": "My sub sub folder",
+ "parentFolderId": subfolder_folder["folderId"],
+ },
+ )
+ subsubfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # move sub sub folder to root folder
+ url = client.app.router["replace_folder"].url_for(
+ folder_id=f"{subsubfolder_folder['folderId']}"
+ )
+ resp = await client.put(
+ f"{url}",
+ json={
+ "name": "My Updated Folder",
+ "parentFolderId": None,
+ },
+ )
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert FolderGet.parse_obj(data)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 3
+
+ # Create new user
+ async with LoggedUser(client) as new_logged_user:
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "New user folder"})
+ new_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py
new file mode 100644
index 00000000000..3cfc1a78842
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py
@@ -0,0 +1,65 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+
+from http import HTTPStatus
+
+import pytest
+from aiohttp.test_utils import TestClient
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_login import UserInfoDict
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_workspaces__list_folders_full_search(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ expected: HTTPStatus,
+ workspaces_clean_db: None,
+):
+ assert client.app
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert data == []
+
+ # create a new folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(f"{url}", json={"name": "My first folder"})
+ root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
+ # create a new workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # create a folder
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(url.path, json={"name": "My first folder"})
+ root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # list full folder search
+ url = client.app.router["list_folders_full_search"].url_for()
+ resp = await client.get(f"{url}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 2
From 0981f491d19c3317016324908787f7f4f5bc060b Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Tue, 12 Nov 2024 15:52:35 +0100
Subject: [PATCH 08/17] =?UTF-8?q?=F0=9F=8E=A8Maintenance:=20make=20redis?=
=?UTF-8?q?=20client=20use=20the=20client=20name=20to=20ease=20debugging?=
=?UTF-8?q?=20(#6700)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../service-library/src/servicelib/redis.py | 5 +-
packages/service-library/tests/conftest.py | 9 ++-
.../tests/deferred_tasks/example_app.py | 2 +
.../test__base_deferred_handler.py | 1 +
packages/service-library/tests/test_pools.py | 29 +++++---
packages/service-library/tests/test_redis.py | 6 +-
...onection.py => test_redis__reconection.py} | 6 +-
packages/service-library/tests/test_utils.py | 15 ++--
.../modules/redis.py | 6 +-
.../modules/clusters_management_task.py | 3 +-
.../modules/redis.py | 5 +-
.../modules/comp_scheduler/_task.py | 3 +-
.../modules/redis.py | 2 +
.../unit/test_utils_distributed_identifier.py | 2 +-
services/director/requirements/_tools.txt | 68 -------------------
.../services/redis.py | 3 +
services/dynamic-scheduler/tests/conftest.py | 4 +-
.../services/modules/redis.py | 6 +-
.../_meta.py | 2 +-
.../services/modules/redis.py | 6 +-
.../src/simcore_service_storage/redis.py | 5 +-
.../src/simcore_service_webserver/redis.py | 2 +
22 files changed, 86 insertions(+), 104 deletions(-)
rename packages/service-library/tests/{test_redis__recoonection.py => test_redis__reconection.py} (88%)
diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py
index 03847ae0b04..7bbb8b2b71b 100644
--- a/packages/service-library/src/servicelib/redis.py
+++ b/packages/service-library/src/servicelib/redis.py
@@ -60,6 +60,7 @@ async def _cancel_or_warn(task: Task) -> None:
@dataclass
class RedisClientSDK:
redis_dsn: str
+ client_name: str
decode_responses: bool = _DEFAULT_DECODE_RESPONSES
health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL
@@ -86,7 +87,7 @@ def __post_init__(self):
socket_connect_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(),
encoding="utf-8",
decode_responses=self.decode_responses,
- auto_close_connection_pool=True,
+ client_name=self.client_name,
)
@retry(**RedisRetryPolicyUponInitialization(_logger).kwargs)
@@ -238,6 +239,7 @@ class RedisClientsManager:
databases_configs: set[RedisManagerDBConfig]
settings: RedisSettings
+ client_name: str
_client_sdks: dict[RedisDatabase, RedisClientSDK] = field(default_factory=dict)
@@ -247,6 +249,7 @@ async def setup(self) -> None:
redis_dsn=self.settings.build_redis_dsn(config.database),
decode_responses=config.decode_responses,
health_check_interval=config.health_check_interval,
+                client_name=self.client_name,
)
for client in self._client_sdks.values():
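
With client_name now a required field, every connection announces itself to the Redis server, so CLIENT LIST output can be attributed to a service. A minimal sketch, assuming a local Redis instance:

import asyncio

from servicelib.redis import RedisClientSDK


async def main() -> None:
    client = RedisClientSDK(
        "redis://localhost:6379/0",  # assumption: a local Redis instance
        client_name="my-service",  # reported by CLIENT LIST / CLIENT GETNAME
    )
    await client.setup()
    assert await client.redis.client_getname() == "my-service"
    await client.shutdown()


asyncio.run(main())
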
diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py
index 927ff75477f..7527ee67a14 100644
--- a/packages/service-library/tests/conftest.py
+++ b/packages/service-library/tests/conftest.py
@@ -80,9 +80,12 @@ async def _(
database: RedisDatabase, decode_response: bool = True # noqa: FBT002
) -> AsyncIterator[RedisClientSDK]:
redis_resources_dns = redis_service.build_redis_dsn(database)
- client = RedisClientSDK(redis_resources_dns, decode_responses=decode_response)
+ client = RedisClientSDK(
+ redis_resources_dns, decode_responses=decode_response, client_name="pytest"
+ )
assert client
assert client.redis_dsn == redis_resources_dns
+ assert client.client_name == "pytest"
await client.setup()
yield client
@@ -94,7 +97,9 @@ async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None:
await clients_manager.client(db).redis.flushall()
async with RedisClientsManager(
- {RedisManagerDBConfig(db) for db in RedisDatabase}, redis_service
+ {RedisManagerDBConfig(db) for db in RedisDatabase},
+ redis_service,
+ client_name="pytest",
) as clients_manager:
await _cleanup_redis_data(clients_manager)
yield _
diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py
index 0ba848178d8..61450a9cb16 100644
--- a/packages/service-library/tests/deferred_tasks/example_app.py
+++ b/packages/service-library/tests/deferred_tasks/example_app.py
@@ -60,6 +60,7 @@ def __init__(self, redis_settings: RedisSettings, port: int) -> None:
self.redis: Redis = RedisClientSDK(
redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=True,
+ client_name="example_app",
).redis
self.port = port
@@ -84,6 +85,7 @@ def __init__(
self._redis_client = RedisClientSDK(
redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=False,
+ client_name="example_app",
)
self._manager = DeferredManager(
rabbit_settings,
diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
index a5b45ed80d9..3aa5b53e7f5 100644
--- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
+++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py
@@ -55,6 +55,7 @@ async def redis_client_sdk(
sdk = RedisClientSDK(
redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS),
decode_responses=False,
+ client_name="pytest",
)
await sdk.setup()
yield sdk
diff --git a/packages/service-library/tests/test_pools.py b/packages/service-library/tests/test_pools.py
index 13c62ad0a3a..1604ba10147 100644
--- a/packages/service-library/tests/test_pools.py
+++ b/packages/service-library/tests/test_pools.py
@@ -1,4 +1,4 @@
-from asyncio import BaseEventLoop
+import asyncio
from concurrent.futures import ProcessPoolExecutor
from servicelib.pools import (
@@ -11,17 +11,25 @@ def return_int_one() -> int:
return 1
-async def test_default_thread_pool_executor(event_loop: BaseEventLoop) -> None:
- assert await event_loop.run_in_executor(None, return_int_one) == 1
+async def test_default_thread_pool_executor() -> None:
+ assert await asyncio.get_running_loop().run_in_executor(None, return_int_one) == 1
-async def test_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None:
- assert await event_loop.run_in_executor(ProcessPoolExecutor(), return_int_one) == 1
+async def test_blocking_process_pool_executor() -> None:
+ assert (
+ await asyncio.get_running_loop().run_in_executor(
+ ProcessPoolExecutor(), return_int_one
+ )
+ == 1
+ )
-async def test_non_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None:
+async def test_non_blocking_process_pool_executor() -> None:
with non_blocking_process_pool_executor() as executor:
- assert await event_loop.run_in_executor(executor, return_int_one) == 1
+ assert (
+ await asyncio.get_running_loop().run_in_executor(executor, return_int_one)
+ == 1
+ )
async def test_same_pool_instances() -> None:
@@ -36,9 +44,12 @@ async def test_different_pool_instances() -> None:
assert first != second
-async def test_non_blocking_thread_pool_executor(event_loop: BaseEventLoop) -> None:
+async def test_non_blocking_thread_pool_executor() -> None:
with non_blocking_thread_pool_executor() as executor:
- assert await event_loop.run_in_executor(executor, return_int_one) == 1
+ assert (
+ await asyncio.get_running_loop().run_in_executor(executor, return_int_one)
+ == 1
+ )
async def test_same_thread_pool_instances() -> None:
diff --git a/packages/service-library/tests/test_redis.py b/packages/service-library/tests/test_redis.py
index 7a3fa9b52d6..c120f85d344 100644
--- a/packages/service-library/tests/test_redis.py
+++ b/packages/service-library/tests/test_redis.py
@@ -277,7 +277,9 @@ async def test_redis_client_sdks_manager(
RedisManagerDBConfig(db) for db in RedisDatabase
}
manager = RedisClientsManager(
- databases_configs=all_redis_configs, settings=redis_service
+ databases_configs=all_redis_configs,
+ settings=redis_service,
+ client_name="pytest",
)
async with manager:
@@ -290,7 +292,7 @@ async def test_redis_client_sdk_setup_shutdown(
):
# setup
redis_resources_dns = redis_service.build_redis_dsn(RedisDatabase.RESOURCES)
- client = RedisClientSDK(redis_resources_dns)
+ client = RedisClientSDK(redis_resources_dns, client_name="pytest")
assert client
assert client.redis_dsn == redis_resources_dns
diff --git a/packages/service-library/tests/test_redis__recoonection.py b/packages/service-library/tests/test_redis__reconection.py
similarity index 88%
rename from packages/service-library/tests/test_redis__recoonection.py
rename to packages/service-library/tests/test_redis__reconection.py
index 89902a4b66e..8fe5a718527 100644
--- a/packages/service-library/tests/test_redis__recoonection.py
+++ b/packages/service-library/tests/test_redis__reconection.py
@@ -21,9 +21,9 @@ async def test_redis_client_sdk_lost_connection(
docker_client: docker.client.DockerClient,
):
redis_client_sdk = RedisClientSDK(
- redis_service.build_redis_dsn(RedisDatabase.RESOURCES)
+ redis_service.build_redis_dsn(RedisDatabase.RESOURCES), client_name="pytest"
)
-
+ assert redis_client_sdk.client_name == "pytest"
await redis_client_sdk.setup()
assert await redis_client_sdk.ping() is True
@@ -41,3 +41,5 @@ async def test_redis_client_sdk_lost_connection(
):
with attempt:
assert await redis_client_sdk.ping() is False
+
+ await redis_client_sdk.shutdown()
diff --git a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py
index 7bfcd4cee69..ebcad03b031 100644
--- a/packages/service-library/tests/test_utils.py
+++ b/packages/service-library/tests/test_utils.py
@@ -5,7 +5,6 @@
import asyncio
from collections.abc import AsyncIterator, Awaitable, Coroutine, Iterator
from copy import copy, deepcopy
-from random import randint
from typing import NoReturn
from unittest import mock
@@ -66,7 +65,6 @@ def mock_logger(mocker: MockerFixture) -> Iterator[mock.Mock]:
async def test_logged_gather(
- event_loop: asyncio.AbstractEventLoop,
coros: list[Coroutine],
mock_logger: mock.Mock,
):
@@ -79,7 +77,7 @@ async def test_logged_gather(
# NOTE: only first error in the list is raised, since it is not RuntimeError, that task
assert isinstance(excinfo.value, ValueError)
- for task in asyncio.all_tasks(event_loop):
+ for task in asyncio.all_tasks(asyncio.get_running_loop()):
if task is not asyncio.current_task():
# info
task.print_stack()
@@ -148,7 +146,7 @@ async def test_fire_and_forget_1000s_tasks(faker: Faker):
tasks_collection = set()
async def _some_task(n: int) -> str:
- await asyncio.sleep(randint(1, 3))
+ await asyncio.sleep(faker.random_int(1, 3))
return f"I'm great since I slept a bit, and by the way I'm task {n}"
for n in range(1000):
@@ -251,7 +249,6 @@ async def test_limited_gather_limits(
async def test_limited_gather(
- event_loop: asyncio.AbstractEventLoop,
coros: list[Coroutine],
mock_logger: mock.Mock,
):
@@ -266,7 +263,7 @@ async def test_limited_gather(
unfinished_tasks = [
task
- for task in asyncio.all_tasks(event_loop)
+ for task in asyncio.all_tasks(asyncio.get_running_loop())
if task is not asyncio.current_task()
]
final_results = await asyncio.gather(*unfinished_tasks, return_exceptions=True)
@@ -288,9 +285,7 @@ async def test_limited_gather_wo_raising(
assert results[5] == 5
-async def test_limited_gather_cancellation(
- event_loop: asyncio.AbstractEventLoop, slow_successful_coros_list: list[Coroutine]
-):
+async def test_limited_gather_cancellation(slow_successful_coros_list: list[Coroutine]):
task = asyncio.create_task(limited_gather(*slow_successful_coros_list, limit=0))
await asyncio.sleep(3)
task.cancel()
@@ -300,7 +295,7 @@ async def test_limited_gather_cancellation(
# check all coros are cancelled
unfinished_tasks = [
task
- for task in asyncio.all_tasks(event_loop)
+ for task in asyncio.all_tasks(asyncio.get_running_loop())
if task is not asyncio.current_task()
]
assert not unfinished_tasks
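Dropping the fixture does not change what these tests verify: after a cancellation, nothing but the current task should still be alive. The check generalizes into a small helper:

    import asyncio


    async def assert_no_leftover_tasks() -> None:
        # Everything except the task running this check should be done.
        leftover = [
            task
            for task in asyncio.all_tasks(asyncio.get_running_loop())
            if task is not asyncio.current_task()
        ]
        assert not leftover, f"unfinished tasks: {leftover}"
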
diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
index 29fed9c6b97..60ce15df956 100644
--- a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
+++ b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = app.state.settings.AUTOSCALING_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
index 410edba1efb..d2e8f6e4c6f 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py
@@ -6,6 +6,7 @@
from servicelib.background_task import start_periodic_task, stop_periodic_task
from servicelib.redis_utils import exclusive
+from .._meta import APP_NAME
from ..core.settings import ApplicationSettings
from ..modules.redis import get_redis_client
from .clusters_management_core import check_clusters
@@ -19,7 +20,7 @@ def on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]:
async def _startup() -> None:
app_settings: ApplicationSettings = app.state.settings
- lock_key = f"{app.title}:clusters-management_lock"
+ lock_key = f"{APP_NAME}:clusters-management_lock"
lock_value = json.dumps({})
app.state.clusters_cleaning_task = start_periodic_task(
exclusive(get_redis_client(app), lock_key=lock_key, lock_value=lock_value)(
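Using the `APP_NAME` constant instead of `app.title` keeps the lock key stable across replicas even when the FastAPI title is customized per deployment. The project's `exclusive` decorator is internal; a hedged sketch of the same single-runner idea with a plain redis-py lock (key name and timeout are illustrative):

    from redis.asyncio import Redis

    APP_NAME = "simcore-service-clusters-keeper"  # normally imported from .._meta


    async def run_exclusively(redis: Redis) -> None:
        # Only one replica acquires the lock; the others skip this round.
        lock = redis.lock(f"{APP_NAME}:clusters-management_lock", timeout=60)
        if await lock.acquire(blocking=False):
            try:
                ...  # the periodic check_clusters(...) body would run here
            finally:
                await lock.release()
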
diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
index 08f0ff54f73..a0a0d6a8745 100644
--- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py
@@ -5,6 +5,7 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
from ..core.settings import get_application_settings
logger = logging.getLogger(__name__)
@@ -15,7 +16,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = get_application_settings(app).CLUSTERS_KEEPER_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
index 0e1c79ff8b6..989b310687c 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
@@ -10,6 +10,7 @@
from servicelib.redis_utils import exclusive
from settings_library.redis import RedisDatabase
+from ..._meta import APP_NAME
from . import _scheduler_factory
_logger = logging.getLogger(__name__)
@@ -26,7 +27,7 @@ async def start_scheduler() -> None:
_logger, level=logging.INFO, msg="starting computational scheduler"
):
redis_clients_manager: RedisClientsManager = app.state.redis_clients_manager
- lock_key = f"{app.title}:computational_scheduler"
+ lock_key = f"{APP_NAME}:computational_scheduler"
app.state.scheduler = scheduler = await _scheduler_factory.create_from_db(
app
)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
index 7cb6f86cc82..e7da01afef7 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
@@ -2,6 +2,7 @@
from servicelib.redis import RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase
+from .._meta import APP_NAME
from ..core.settings import AppSettings
@@ -18,6 +19,7 @@ async def on_startup() -> None:
)
},
settings=settings.REDIS,
+ client_name=APP_NAME,
)
await redis_clients_manager.setup()
diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py
index ce200feef97..8c316876a9c 100644
--- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py
+++ b/services/director-v2/tests/unit/test_utils_distributed_identifier.py
@@ -171,7 +171,7 @@ async def redis_client_sdk(
RedisDatabase.DISTRIBUTED_IDENTIFIERS
)
- client = RedisClientSDK(redis_resources_dns)
+ client = RedisClientSDK(redis_resources_dns, client_name="pytest")
assert client
assert client.redis_dsn == redis_resources_dns
await client.setup()
diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt
index 24945ba6807..e69de29bb2d 100644
--- a/services/director/requirements/_tools.txt
+++ b/services/director/requirements/_tools.txt
@@ -1,68 +0,0 @@
-#
-# This file is autogenerated by pip-compile with python 3.6
-# To update, run:
-#
-# pip-compile --output-file=requirements/_tools.txt --strip-extras requirements/_tools.in
-#
-appdirs==1.4.4
- # via black
-black==20.8b1
- # via -r requirements/_tools.in
-bump2version==1.0.1
- # via -r requirements/_tools.in
-click==8.0.3
- # via
- # black
- # pip-tools
-dataclasses==0.7
- # via
- # -c requirements/_base.txt
- # -c requirements/_test.txt
- # black
-importlib-metadata==2.0.0
- # via
- # -c requirements/_test.txt
- # click
- # pep517
-mypy-extensions==0.4.3
- # via black
-pathspec==0.9.0
- # via black
-pep517==0.12.0
- # via pip-tools
-pip==24.3.1
- # via pip-tools
-pip-tools==6.4.0
- # via -r requirements/_tools.in
-pyyaml==5.4
- # via
- # -c requirements/_base.txt
- # -c requirements/_test.txt
- # watchdog
-regex==2022.1.18
- # via black
-toml==0.10.2
- # via
- # -c requirements/_test.txt
- # black
-tomli==1.2.3
- # via pep517
-typed-ast==1.4.1
- # via
- # -c requirements/_test.txt
- # black
-typing-extensions==4.0.1
- # via black
-watchdog==2.1.6
- # via -r requirements/_tools.in
-wheel==0.37.1
- # via pip-tools
-zipp==3.4.0
- # via
- # -c requirements/_test.txt
- # importlib-metadata
- # pep517
-
-# The following packages are considered to be unsafe in a requirements file:
-# pip
-# setuptools
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
index 84131eaf54b..ff7d53920bf 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py
@@ -4,6 +4,8 @@
from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase, RedisSettings
+from .._meta import APP_NAME
+
_DECODE_DBS: Final[set[RedisDatabase]] = {
RedisDatabase.LOCKS,
}
@@ -24,6 +26,7 @@ async def on_startup() -> None:
{RedisManagerDBConfig(x, decode_responses=False) for x in _BINARY_DBS}
| {RedisManagerDBConfig(x, decode_responses=True) for x in _DECODE_DBS},
settings,
+ client_name=APP_NAME,
)
await manager.setup()
diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py
index 8b672b0408e..ae2e723708e 100644
--- a/services/dynamic-scheduler/tests/conftest.py
+++ b/services/dynamic-scheduler/tests/conftest.py
@@ -135,7 +135,9 @@ async def app(
@pytest.fixture
async def remove_redis_data(redis_service: RedisSettings) -> None:
async with RedisClientsManager(
- {RedisManagerDBConfig(x) for x in RedisDatabase}, redis_service
+ {RedisManagerDBConfig(x) for x in RedisDatabase},
+ redis_service,
+ client_name="pytest",
) as manager:
await logged_gather(
*[manager.client(d).redis.flushall() for d in RedisDatabase]
diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
index 20cbcc0a4db..4876e5b8b21 100644
--- a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
+++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ..._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_lock_client_sdk = None
settings: RedisSettings = app.state.settings.EFS_GUARDIAN_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await lock_client.setup()
async def on_shutdown() -> None:
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
index 63e86cce819..ceb639ddcc9 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py
@@ -19,7 +19,7 @@
API_VERSION: Final[VersionStr] = info.__version__
API_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, info.api_prefix_path_tag)
SUMMARY: Final[str] = info.get_summary()
-
+APP_NAME: Final[str] = PROJECT_NAME
# NOTE: https://texteditor.com/ascii-frames/
APP_STARTED_BANNER_MSG = r"""
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
index 922b0e7e49e..0aece119077 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py
@@ -5,6 +5,8 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ..._meta import APP_NAME
+
logger = logging.getLogger(__name__)
@@ -13,7 +15,9 @@ async def on_startup() -> None:
app.state.redis_client_sdk = None
settings: RedisSettings = app.state.settings.RESOURCE_USAGE_TRACKER_REDIS
redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
- app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn)
+ app.state.redis_client_sdk = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
async def on_shutdown() -> None:
diff --git a/services/storage/src/simcore_service_storage/redis.py b/services/storage/src/simcore_service_storage/redis.py
index 2380bd332dc..f18f891ec19 100644
--- a/services/storage/src/simcore_service_storage/redis.py
+++ b/services/storage/src/simcore_service_storage/redis.py
@@ -5,6 +5,7 @@
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings
+from ._meta import APP_NAME
from .constants import APP_CONFIG_KEY
from .settings import Settings
@@ -20,7 +21,9 @@ async def _setup(app: web.Application):
assert settings.STORAGE_REDIS # nosec
redis_settings: RedisSettings = settings.STORAGE_REDIS
redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS)
- app[_APP_REDIS_KEY] = client = RedisClientSDK(redis_locks_dsn)
+ app[_APP_REDIS_KEY] = client = RedisClientSDK(
+ redis_locks_dsn, client_name=APP_NAME
+ )
await client.setup()
yield
diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py
index deee93f1fbd..1a1427cc09c 100644
--- a/services/web/server/src/simcore_service_webserver/redis.py
+++ b/services/web/server/src/simcore_service_webserver/redis.py
@@ -7,6 +7,7 @@
from settings_library.redis import RedisDatabase, RedisSettings
from ._constants import APP_SETTINGS_KEY
+from ._meta import APP_NAME
_logger = logging.getLogger(__name__)
@@ -44,6 +45,7 @@ async def setup_redis_client(app: web.Application):
)
},
settings=redis_settings,
+ client_name=APP_NAME,
)
await manager.setup()
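With every service in this patch now registering a client name, connections can be audited server-side. A quick sketch, assuming a reachable Redis:

    import asyncio

    from redis.asyncio import Redis


    async def audit_clients() -> None:
        redis = Redis.from_url("redis://localhost:6379/0", decode_responses=True)
        # CLIENT LIST entries expose the name each service set via client_name.
        for info in await redis.client_list():
            print(info["addr"], info["name"])
        await redis.aclose()


    asyncio.run(audit_clients())
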
From 819df89656bb0c9576234512324b783475f70b4c Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Tue, 12 Nov 2024 17:02:44 +0100
Subject: [PATCH 09/17] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Make=20the=20?=
=?UTF-8?q?Light=20Theme=20less=20white=20(#6681)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../client/source/class/osparc/Application.js | 9 +-
.../source/class/osparc/auth/LoginPageS4L.js | 2 +-
.../class/osparc/desktop/SlideshowView.js | 2 +-
.../class/osparc/desktop/WorkbenchView.js | 26 ++---
.../class/osparc/node/BootOptionsView.js | 5 -
.../source/class/osparc/node/LifeCycleView.js | 5 -
.../osparc/node/UpdateResourceLimitsView.js | 5 -
.../notification/RibbonNotifications.js | 5 +-
.../source/class/osparc/theme/ColorDark.js | 81 +++++++-------
.../source/class/osparc/theme/ColorLight.js | 100 +++++++++---------
.../source/class/osparc/widget/NodeOptions.js | 9 --
.../osparc/workbench/DiskUsageIndicator.js | 2 +-
12 files changed, 100 insertions(+), 151 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js
index c5f760188e5..0b18c01bd22 100644
--- a/services/static-webserver/client/source/class/osparc/Application.js
+++ b/services/static-webserver/client/source/class/osparc/Application.js
@@ -222,9 +222,6 @@ qx.Class.define("osparc.Application", {
__setDeviceSpecificIcons: function() {
const isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !window.MSStream;
const isAndroid = /android/i.test(navigator.userAgent);
- const isWindows = /windows/i.test(navigator.userAgent);
- // const productColor = qx.theme.manager.Color.getInstance().resolve("product-color");
- // const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color");
// default icons
this.__updateMetaTags();
this.__setDefaultIcons()
@@ -232,8 +229,6 @@ qx.Class.define("osparc.Application", {
this.__setIOSpIcons();
} else if (isAndroid) {
this.__setGoogleIcons();
- } else if (isWindows) {
- // this.__updateBrowserConfig(this.__getProductMetaData().productColor);
}
},
@@ -246,16 +241,14 @@ qx.Class.define("osparc.Application", {
}
const productColor = qx.theme.manager.Color.getInstance().resolve("product-color");
- const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color");
return {
productName: productName,
productColor: productColor,
- backgroundColor: backgroundColor
}
},
__updateMetaTags: function() {
- // check device type and only set the icons for the divice type
+ // check device type and only set the icons for the device type
// i.e iOS, Android or windows etc
const themeColorMeta = document.querySelector("meta[name='theme-color']");
const tileColorMeta = document.querySelector("meta[name='msapplication-TileColor']");
diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
index 8118ecdc9f8..1e7cf123b37 100644
--- a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
+++ b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js
@@ -29,7 +29,7 @@ qx.Class.define("osparc.auth.LoginPageS4L", {
const layout = new qx.ui.layout.HBox();
this._setLayout(layout);
- this.setBackgroundColor("primary-background-color");
+ this.setBackgroundColor("rgba(0, 20, 46, 1)");
this._removeAll();
diff --git a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
index 593088bc4cd..e05a37f56a1 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js
@@ -24,7 +24,7 @@ qx.Class.define("osparc.desktop.SlideshowView", {
this._setLayout(new qx.ui.layout.VBox());
const slideshowToolbar = this.__slideshowToolbar = new osparc.desktop.SlideshowToolbar().set({
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
const collapseWithUserMenu = this.__collapseWithUserMenu = new osparc.desktop.CollapseWithUserMenu();
diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
index 1daeea1c0f1..accb850ab5d 100644
--- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
+++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js
@@ -40,16 +40,10 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
TAB_BUTTON_HEIGHT: 46,
decorateSplitter: function(splitter) {
- const colorManager = qx.theme.manager.Color.getInstance();
- const binaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main"));
splitter.set({
width: 2,
- backgroundColor: binaryColor
+ backgroundColor: "workbench-view-splitter"
});
- colorManager.addListener("changeTheme", () => {
- const newBinaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main"));
- splitter.setBackgroundColor(newBinaryColor);
- }, this);
},
decorateSlider: function(slider) {
@@ -202,7 +196,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
control = new qx.ui.tabview.TabView().set({
contentPadding: osparc.widget.CollapsibleViewLight.CARET_WIDTH + 2, // collapse bar + padding
contentPaddingRight: 2,
- backgroundColor: this.self().PRIMARY_COL_BG_COLOR,
barPosition: "top"
});
const collapsibleViewLeft = this.getChildControl("collapsible-view-left");
@@ -322,7 +315,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const topBar = tabViewPrimary.getChildControl("bar");
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -392,7 +385,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const topBar = tabViewSecondary.getChildControl("bar");
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -427,7 +420,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
topBar.set({
height: this.self().TAB_BUTTON_HEIGHT,
alignY: "top",
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
this.__addTopBarSpacer(topBar);
@@ -483,7 +476,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
__addTopBarSpacer: function(tabViewTopBar) {
const spacer = new qx.ui.core.Widget().set({
- backgroundColor: "tab_navigation_bar_background_color"
+ backgroundColor: "workbench-view-navbar"
});
tabViewTopBar.add(spacer, {
flex: 1
@@ -492,7 +485,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
__createCollapsibleViewSpacer: function() {
const spacer = new qx.ui.core.Widget().set({
- backgroundColor: "tab_navigation_bar_background_color",
+ backgroundColor: "workbench-view-navbar",
height: this.self().TAB_BUTTON_HEIGHT
});
return spacer;
@@ -1079,13 +1072,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", {
const nodeOptions = new osparc.widget.NodeOptions(node);
nodeOptions.buildLayout();
- [
- "versionChanged",
- "bootModeChanged",
- "limitsChanged"
- ].forEach(eventName => {
- nodeOptions.addListener(eventName, () => this.__populateSecondaryColumn(node));
- });
return nodeOptions;
},
diff --git a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
index a6c38981dff..291c028422d 100644
--- a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
+++ b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.BootOptionsView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "bootModeChanged": "qx.event.type.Event"
- },
-
members: {
_applyNode: function(node) {
if (node.hasBootModes()) {
@@ -61,7 +57,6 @@ qx.Class.define("osparc.node.BootOptionsView", {
setTimeout(() => {
buttonsLayout.setEnabled(true);
node.requestStartNode();
- this.fireEvent("bootModeChanged");
}, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL);
}
}, this);
diff --git a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
index 2cdfb2c1f74..5f810b18799 100644
--- a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
+++ b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.LifeCycleView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "versionChanged": "qx.event.type.Event"
- },
-
members: {
_applyNode: function(node) {
if (node.isUpdatable() || node.isDeprecated() || node.isRetired()) {
@@ -125,7 +121,6 @@ qx.Class.define("osparc.node.LifeCycleView", {
setTimeout(() => {
updateButton.setFetching(false);
node.requestStartNode();
- this.fireEvent("versionChanged");
}, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL);
});
diff --git a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
index f6770a7e675..3c75815c296 100644
--- a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
+++ b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js
@@ -18,10 +18,6 @@
qx.Class.define("osparc.node.UpdateResourceLimitsView", {
extend: osparc.node.ServiceOptionsView,
- events: {
- "limitsChanged": "qx.event.type.Event"
- },
-
members: {
__resourceFields: null,
__saveBtn: null,
@@ -159,7 +155,6 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", {
osparc.data.Resources.fetch("nodesInStudyResources", "put", params)
.then(() => {
osparc.FlashMessenger.getInstance().logAs(this.tr("Limits successfully updated"));
- this.fireEvent("limitsChanged");
})
.catch(err => {
console.error(err);
diff --git a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
index 1cbe3b5f7ea..b2ea90b2b8e 100644
--- a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
+++ b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js
@@ -97,15 +97,14 @@ qx.Class.define("osparc.notification.RibbonNotifications", {
if (notification.getType() === "announcement") {
const dontShowButton = new qx.ui.form.Button(this.tr("Don't show again")).set({
- backgroundColor: "transparent",
- textColor: "strong-text",
+ appearance: "strong-button",
alignY: "middle",
padding: 4,
allowGrowX: false,
allowGrowY: false,
marginLeft: 15
});
- osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("strong-text"));
+ osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("text"));
dontShowButton.addListener("tap", () => {
this.removeNotification(notification);
osparc.utils.Utils.localCache.setDontShowAnnouncement(notification.announcementId);
diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
index ca275a2371d..fda2ccd25a4 100644
--- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
+++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js
@@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorDark", {
include: osparc.theme.mixin.Color,
colors: {
+ // 105-0
"c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105),
- "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100),
- "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95),
- "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85),
- "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80),
- "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70),
- "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55),
- "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45),
- "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35),
- "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 30),
- "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25),
- "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 20),
- "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15),
- "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8),
- "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0),
+ "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-5),
+ "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-10),
+ "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-20),
+ "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-25),
+ "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-35),
+ "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-50),
+ "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-60),
+ "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-70),
+ "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-75),
+ "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-80),
+ "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-90),
+ "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-105),
"product-color": "rgba(0, 144, 208, 1)", // override in product
"strong-main": "product-color",
- "strong-text": "rgba(255, 255, 255, 1)",
"a-bit-transparent": "rgba(0, 0, 0, 0.4)",
// main
- "background-main": "#222",
+ "background-main": "c01",
"background-main-1": "c02",
"background-main-2": "c03",
"background-main-3": "c04",
@@ -34,10 +32,7 @@ qx.Theme.define("osparc.theme.ColorDark", {
"background-card-overlay": "rgba(25, 33, 37, 0.8)",
"background-workspace-card-overlay": "rgb(35, 93, 122)",
- "primary-background-color": "rgba(0, 20, 46, 1)",
"navigation_bar_background_color": "rgba(1, 18, 26, 0.8)",
- "tab_navigation_bar_background_color": "c00",
- "modal-backdrop": "rgba(8, 9, 13, 1)",
"fab_text": "contrasted-text-dark",
"fab-background": "rgba(255, 255, 255, 0.2)",
"input_background": "#213248",
@@ -58,18 +53,18 @@ qx.Theme.define("osparc.theme.ColorDark", {
"link": "rgba(10, 182, 255, 1)",
// shadows
- "bg-shadow": "background-main-5",
- "box-shadow": "rgba(0,0,0, 0.15)",
+ "bg-shadow": "background-main-5",
+ "box-shadow": "rgba(0, 0, 0, 0.15)",
"shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow",
// window
"window-popup-background": "rgba(66, 66, 66, 1)",
"window-caption-background": "background-main",
- "window-caption-background-active": "background-main-3",
+ "window-caption-background-active": "background-main-3",
"window-caption-text": "text",
"window-caption-text-active": "c12",
- "window-border": "background-main-2",
- "window-border-inner": "background-main-1",
+ "window-border": "background-main-2",
+ "window-border-inner": "background-main-1",
// material-button
"material-button-background": "fab-background",
@@ -88,10 +83,10 @@ qx.Theme.define("osparc.theme.ColorDark", {
// backgrounds
"background-selected": "default-button-background",
- "background-selected-disabled": "default-button-disabled",
- "background-selected-dark": "product-color",
+ "background-selected-disabled": "default-button-disabled",
+ "background-selected-dark": "product-color",
"background-disabled": "background-main",
- "background-disabled-checked": "background-main-1",
+ "background-disabled-checked": "background-main-1",
"background-pane": "background-main",
// tabview
@@ -102,23 +97,23 @@ qx.Theme.define("osparc.theme.ColorDark", {
"tabview-button-background": "transparent",
// scrollbar
- "scrollbar-passive": "background-main-4",
- "scrollbar-active": "background-main-5",
+ "scrollbar-passive": "background-main-4",
+ "scrollbar-active": "background-main-5",
// form
"button": "background-main-4",
- "button-border": "background-main-5",
+ "button-border": "background-main-5",
"button-border-hovered": "c07",
- "button-box": "background-main-3",
- "button-box-pressed": "background-main-4",
+ "button-box": "background-main-3",
+ "button-box-pressed": "background-main-4",
"border-lead": "c07",
// group box
- "white-box-border": "background-main-2",
+ "white-box-border": "background-main-2",
// borders
// 'border-main' is an alias of 'background-selected' (compatibility reasons)
- "border": "background-main-3",
+ "border": "background-main-3",
"border-focused": "c09",
"border-invalid": "failed-red",
"border-disabled": "background-main",
@@ -134,13 +129,13 @@ qx.Theme.define("osparc.theme.ColorDark", {
"table-header": "background-main",
"table-header-foreground": "c09",
"table-header-border": "c07",
- "table-focus-indicator": "background-main-5",
+ "table-focus-indicator": "background-main-5",
// used in table code
"table-header-cell": "background-main",
- "table-row-background-focused-selected": "background-main-4",
- "table-row-background-focused": "background-main-3",
- "table-row-background-selected": "background-main-4",
+ "table-row-background-focused-selected": "background-main-4",
+ "table-row-background-focused": "background-main-3",
+ "table-row-background-selected": "background-main-4",
"table-row-background-even": "background-main",
"table-row-background-odd": "background-main",
@@ -156,11 +151,11 @@ qx.Theme.define("osparc.theme.ColorDark", {
"progressive-table-header": "c08",
"progressive-table-row-background-even": "background-main",
"progressive-table-row-background-odd": "background-main",
- "progressive-progressbar-background": "background-main",
+ "progressive-progressbar-background": "background-main",
"progressive-progressbar-indicator-done": "background-main",
- "progressive-progressbar-indicator-undone": "background-main-1",
- "progressive-progressbar-percent-background": "background-main",
- "progressive-progressbar-percent-text": "background-main-1",
+ "progressive-progressbar-indicator-undone": "background-main-1",
+ "progressive-progressbar-percent-background": "background-main",
+ "progressive-progressbar-percent-text": "background-main-1",
@@ -168,6 +163,8 @@ qx.Theme.define("osparc.theme.ColorDark", {
"workbench-edge-comp-active": "#777777",
"workbench-edge-api-active": "#BBBBBB",
"workbench-start-hint": "#505050",
+ "workbench-view-navbar": "c00",
+ "workbench-view-splitter": "#000000",
"node-background": "rgba(113, 157, 181, 0.5)",
"node-selected-background": "strong-main",
diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
index 54f1e83d0ea..c1a6bfb5783 100644
--- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
+++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js
@@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorLight", {
include: osparc.theme.mixin.Color,
colors: {
+ // 0-105
"c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0),
- "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8),
- "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15),
- "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25),
- "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35),
- "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45),
- "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55),
- "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 60),
- "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 65),
- "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70),
- "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80),
- "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85),
- "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95),
- "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100),
- "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105),
+ "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+5),
+ "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+10),
+ "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+20),
+ "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+25),
+ "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+35),
+ "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+50),
+ "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+60),
+ "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+70),
+ "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+75),
+ "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+80),
+ "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+90),
+ "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+105),
"product-color": "rgba(0, 144, 208, 1)", // override in product
"strong-main": "product-color",
- "strong-text": "background-main-1",
"a-bit-transparent": "rgba(255, 255, 255, 0.4)",
// main
- "background-main": "rgba(250,250,250, 1)", // Is manipulated
+ "background-main": "c01",
"background-main-1": "c02",
"background-main-2": "c03",
"background-main-3": "c04",
@@ -34,12 +32,9 @@ qx.Theme.define("osparc.theme.ColorLight", {
"background-card-overlay": "rgba(229, 229, 229, 0.8)",
"background-workspace-card-overlay": "rgb(165, 223, 252)",
- "primary-background-color": "rgba(255, 255, 255, 1)",
"navigation_bar_background_color": "rgba(229, 229, 229, 0.8)",
- "tab_navigation_bar_background_color": "c00",
- "modal-backdrop": "rgba(247, 248, 252, 0.4)",
"fab_text": "contrasted-text-dark",
- "fab-background": "rgba(255, 255, 255, 1)",
+ "fab-background": "rgba(255, 255, 255, 0.2)",
"input_background": "rgba(209, 214, 218, 1)",
"input_background_disable": "rgba(113, 157, 181, 0.04)",
"hint-background": "rgba(201, 201, 201, 1)",
@@ -58,24 +53,25 @@ qx.Theme.define("osparc.theme.ColorLight", {
"link": "rgba(10, 182, 255, 1)",
// shadows
- "bg-shadow": "background-main-5",
- "box-shadow": "rgba(0,0,0, 0.15)",
+ "bg-shadow": "background-main-5",
+ "box-shadow": "rgba(0, 0, 0, 0.15)",
"shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow",
// window
- "window-popup-background": "rgba(255, 255, 255, 1)",
+ // OM here
+ "window-popup-background": "rgba(225, 225, 225, 1)",
"window-caption-background": "background-main",
- "window-caption-background-active": "background-main-3",
+ "window-caption-background-active": "background-main-3",
"window-caption-text": "text",
"window-caption-text-active": "c12",
- "window-border": "background-main-2",
- "window-border-inner": "background-main-1",
+ "window-border": "background-main-2",
+ "window-border-inner": "background-main-1",
// material-button
- "material-button-background": "fab-background",
- "material-button-background-disabled": "default-button-disabled-background",
- "material-button-background-hovered": "default-button-hover-background",
- "material-button-background-pressed": "default-button-active-background",
+ "material-button-background": "fab-background",
+ "material-button-background-disabled": "default-button-disabled-background",
+ "material-button-background-hovered": "default-button-hover-background",
+ "material-button-background-pressed": "default-button-active-background",
"material-button-text-disabled": "default-button-disabled-background",
"material-button-text": "default-button-text-outline",
@@ -88,43 +84,43 @@ qx.Theme.define("osparc.theme.ColorLight", {
// backgrounds
"background-selected": "default-button-background",
- "background-selected-disabled": "default-button-disabled",
- "background-selected-dark": "product-color",
+ "background-selected-disabled": "default-button-disabled",
+ "background-selected-dark": "product-color",
"background-disabled": "background-main",
- "background-disabled-checked": "background-main-1",
+ "background-disabled-checked": "background-main-1",
"background-pane": "background-main",
// tabview
"tabview-unselected": "c14",
- "tabview-button-border": "c14",
+ "tabview-button-border": "product-color",
"tabview-label-active-disabled": "c10",
"tabview-pane-background": "transparent",
"tabview-button-background": "transparent",
// scrollbar
- "scrollbar-passive": "background-main-4",
- "scrollbar-active": "background-main-5",
+ "scrollbar-passive": "background-main-4",
+ "scrollbar-active": "background-main-5",
// form
- "button": "background-main-4",
- "button-border": "background-main-5",
+ "button": "background-main-4",
+ "button-border": "background-main-5",
"button-border-hovered": "c07",
- "button-box": "background-main-3",
- "button-box-pressed": "background-main-4",
+ "button-box": "background-main-3",
+ "button-box-pressed": "background-main-4",
"border-lead": "c07",
// group box
- "white-box-border": "background-main-2",
+ "white-box-border": "background-main-2",
// borders
// 'border-main' is an alias of 'background-selected' (compatibility reasons)
- "border": "background-main-3",
+ "border": "background-main-3",
"border-focused": "c09",
"border-invalid": "failed-red",
"border-disabled": "background-main",
// separator
- "border-separator": "fab-background",
+ "border-separator": "background-main-3",
// tooltip
"tooltip": "flash_message_bg",
@@ -135,13 +131,13 @@ qx.Theme.define("osparc.theme.ColorLight", {
"table-header": "background-main",
"table-header-foreground": "c09",
"table-header-border": "c07",
- "table-focus-indicator": "background-main-5",
+ "table-focus-indicator": "background-main-5",
// used in table code
"table-header-cell": "background-main",
- "table-row-background-focused-selected": "background-main-4",
- "table-row-background-focused": "background-main-3",
- "table-row-background-selected": "background-main-4",
+ "table-row-background-focused-selected": "background-main-4",
+ "table-row-background-focused": "background-main-3",
+ "table-row-background-selected": "background-main-4",
"table-row-background-even": "background-main",
"table-row-background-odd": "background-main",
@@ -157,17 +153,19 @@ qx.Theme.define("osparc.theme.ColorLight", {
"progressive-table-header": "c08",
"progressive-table-row-background-even": "background-main",
"progressive-table-row-background-odd": "background-main",
- "progressive-progressbar-background": "background-main",
+ "progressive-progressbar-background": "background-main",
"progressive-progressbar-indicator-done": "background-main",
- "progressive-progressbar-indicator-undone": "background-main-1",
- "progressive-progressbar-percent-background": "background-main",
- "progressive-progressbar-percent-text": "background-main-1",
+ "progressive-progressbar-indicator-undone": "background-main-1",
+ "progressive-progressbar-percent-background": "background-main",
+ "progressive-progressbar-percent-text": "background-main-1",
// OSPARC
"workbench-edge-comp-active": "#888888",
"workbench-edge-api-active": "#444444",
"workbench-start-hint": "#AFAFAF",
+ "workbench-view-navbar": "c02",
+ "workbench-view-splitter": "background-main-3",
"node-background": "rgba(113, 157, 181, 0.35)",
"node-selected-background": "strong-main",
diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
index 180de5bb2cb..7cf74384589 100644
--- a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
+++ b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js
@@ -33,12 +33,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
this.setNode(node);
},
- events: {
- "versionChanged": "qx.event.type.Event",
- "bootModeChanged": "qx.event.type.Event",
- "limitsChanged": "qx.event.type.Event"
- },
-
properties: {
node: {
check: "osparc.data.model.Node",
@@ -74,7 +68,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
(node.isUpdatable() || node.isDeprecated() || node.isRetired())
) {
const lifeCycleView = new osparc.node.LifeCycleView(node);
- node.addListener("versionChanged", () => this.fireEvent("versionChanged"));
sections.push(lifeCycleView);
showStartStopButton = true;
@@ -83,7 +76,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
// Boot Options
if (node.hasBootModes()) {
const bootOptionsView = new osparc.node.BootOptionsView(node);
- node.addListener("bootModeChanged", () => this.fireEvent("bootModeChanged"));
sections.push(bootOptionsView);
showStartStopButton = true;
@@ -95,7 +87,6 @@ qx.Class.define("osparc.widget.NodeOptions", {
(node.isComputational() || node.isDynamic())
) {
const updateResourceLimitsView = new osparc.node.UpdateResourceLimitsView(node);
- node.addListener("limitsChanged", () => this.fireEvent("limitsChanged"));
sections.push(updateResourceLimitsView);
showStartStopButton |= node.isDynamic();
diff --git a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
index 173b3689524..e733be3b6bc 100644
--- a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
+++ b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js
@@ -172,7 +172,7 @@ qx.Class.define("osparc.workbench.DiskUsageIndicator", {
toolTipText += this.tr("Data storage: ") + osparc.utils.Utils.bytesToSize(diskVolsUsage.free) + "
";
toolTipText += this.tr("I/O storage: ") + osparc.utils.Utils.bytesToSize(diskHostUsage.free) + "
";
}
- const bgColor = qx.theme.manager.Color.getInstance().resolve("tab_navigation_bar_background_color");
+ const bgColor = qx.theme.manager.Color.getInstance().resolve("workbench-view-navbar");
const color2 = qx.theme.manager.Color.getInstance().resolve("progressive-progressbar-background");
indicator.getContentElement().setStyles({
"background-color": bgColor,
From d9fb9d26bea8142c588650aae212224e1686b4fe Mon Sep 17 00:00:00 2001
From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Wed, 13 Nov 2024 13:24:06 +0100
Subject: [PATCH 10/17] =?UTF-8?q?=F0=9F=90=9B=20folder=20full=20search=20a?=
=?UTF-8?q?dding=20text=20query=20parameter=20(#6716)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
api/specs/web-server/_folders.py | 1 +
.../api/v0/openapi.yaml | 6 ++++++
.../folders/_folders_api.py | 3 +++
.../folders/_folders_db.py | 3 +++
.../folders/_folders_handlers.py | 1 +
.../simcore_service_webserver/folders/_models.py | 16 +++++++++++++++-
.../04/folders/test_folders__full_search.py | 8 ++++++++
7 files changed, 37 insertions(+), 1 deletion(-)
diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py
index 25eecea5cd0..ef5e29ac85d 100644
--- a/api/specs/web-server/_folders.py
+++ b/api/specs/web-server/_folders.py
@@ -69,6 +69,7 @@ async def list_folders(
)
async def list_folders_full_search(
params: Annotated[PageQueryParameters, Depends()],
+ text: str | None = None,
order_by: Annotated[
Json,
Query(
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index 40d0841c65a..860d9869218 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2697,6 +2697,12 @@ paths:
summary: List Folders Full Search
operationId: list_folders_full_search
parameters:
+ - required: false
+ schema:
+ title: Text
+ type: string
+ name: text
+ in: query
- description: Order by field (modified_at|name|description) and direction (asc|desc).
The default sorting order is ascending.
required: false
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
index a791a65c715..2ef9818f431 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py
@@ -172,6 +172,7 @@ async def list_folders(
else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE)
),
filter_trashed=trashed,
+ filter_by_text=None,
offset=offset,
limit=limit,
order_by=order_by,
@@ -199,6 +200,7 @@ async def list_folders_full_search(
app: web.Application,
user_id: UserID,
product_name: ProductName,
+ text: str | None,
trashed: bool | None,
offset: NonNegativeInt,
limit: int,
@@ -213,6 +215,7 @@ async def list_folders_full_search(
folder_query=FolderQuery(folder_scope=FolderScope.ALL),
workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL),
filter_trashed=trashed,
+ filter_by_text=text,
offset=offset,
limit=limit,
order_by=order_by,
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index 0af9d36dadf..f4e27fa3a7a 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -113,6 +113,7 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches
workspace_query: WorkspaceQuery,
# attribute filters
filter_trashed: bool | None,
+ filter_by_text: str | None,
# pagination
offset: NonNegativeInt,
limit: int,
@@ -199,6 +200,8 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches
else:
assert folder_query.folder_scope == FolderScope.ROOT # nosec
attributes_filters.append(folders_v2.c.parent_folder_id.is_(None))
+ if filter_by_text:
+ attributes_filters.append(folders_v2.c.name.ilike(f"%{filter_by_text}%"))
###
# Combined
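`filter_by_text` becomes a case-insensitive substring match on the folder name. A stand-alone sketch of the construct (the real query additionally combines access-rights, scope, and trash filters):

    import sqlalchemy as sa

    metadata = sa.MetaData()
    folders_v2 = sa.Table(  # minimal stand-in for the real table
        "folders_v2",
        metadata,
        sa.Column("folder_id", sa.BigInteger, primary_key=True),
        sa.Column("name", sa.String, nullable=False),
    )


    def folders_query(filter_by_text: str | None):
        query = sa.select(folders_v2)
        if filter_by_text:
            # ILIKE '%text%': case-insensitive containment on the name column
            query = query.where(folders_v2.c.name.ilike(f"%{filter_by_text}%"))
        return query
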
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
index 7050205bd7d..b1a01ef61aa 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py
@@ -119,6 +119,7 @@ async def list_folders_full_search(request: web.Request):
app=request.app,
user_id=req_ctx.user_id,
product_name=req_ctx.product_name,
+ text=query_params.text,
trashed=query_params.filters.trashed,
offset=query_params.offset,
limit=query_params.limit,
diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py
index 5e48f46fa37..899514a271b 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_models.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_models.py
@@ -6,7 +6,10 @@
from models_library.rest_ordering import OrderBy, OrderDirection
from models_library.rest_pagination import PageQueryParameters
from models_library.users import UserID
-from models_library.utils.common_validators import null_or_none_str_to_none_validator
+from models_library.utils.common_validators import (
+ empty_str_to_none_pre_validator,
+ null_or_none_str_to_none_validator,
+)
from models_library.workspaces import WorkspaceID
from pydantic import BaseModel, Extra, Field, Json, validator
from servicelib.aiohttp.requests_validation import RequestParams, StrictRequestParams
@@ -88,6 +91,17 @@ class Config:
class FolderListFullSearchWithJsonStrQueryParams(
PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters]
):
+ text: str | None = Field(
+ default=None,
+ description="Multi column full text search, across all folders and workspaces",
+ max_length=100,
+ example="My Project",
+ )
+
+ _empty_is_none = validator("text", allow_reuse=True, pre=True)(
+ empty_str_to_none_pre_validator
+ )
+
class Config:
extra = Extra.forbid
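The pre-validator normalizes an empty `?text=` query into `None`, so an empty search box behaves like no filter at all. A pydantic-v1-style sketch matching the usage above; the real helper lives in `models_library.utils.common_validators` and this body is an approximation:

    from pydantic import BaseModel, Field, validator


    def empty_str_to_none_pre_validator(value):
        # Runs before field validation: '' becomes None (approximate body).
        if isinstance(value, str) and value.strip() == "":
            return None
        return value


    class SearchQueryParams(BaseModel):
        text: str | None = Field(default=None, max_length=100)

        _empty_is_none = validator("text", allow_reuse=True, pre=True)(
            empty_str_to_none_pre_validator
        )


    assert SearchQueryParams(text="").text is None
    assert SearchQueryParams(text="My subfolder").text == "My subfolder"
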
diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
index b9da926543e..74126da042f 100644
--- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
+++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py
@@ -103,6 +103,14 @@ async def test_folders_full_search(
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 3
+ # list full folder search with specific text
+ url = client.app.router["list_folders_full_search"].url_for()
+ query_parameters = {"text": "My subfolder"}
+ url_with_query = url.with_query(**query_parameters)
+ resp = await client.get(f"{url_with_query}")
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
# Create new user
async with LoggedUser(client) as new_logged_user:
# list full folder search
From 0718e142676741607a38d6fc99c78aaef760577c Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Wed, 13 Nov 2024 15:04:52 +0100
Subject: [PATCH 11/17] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Enh:=20``:search?=
=?UTF-8?q?``=20also=20``/folders``=20(#6713)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../osparc/dashboard/FolderButtonItem.js | 42 +++++++---
.../osparc/dashboard/ResourceBrowserBase.js | 12 +++
.../dashboard/ResourceContainerManager.js | 2 +
.../class/osparc/dashboard/StudyBrowser.js | 81 ++++++++++++-------
.../source/class/osparc/data/Resources.js | 8 +-
.../source/class/osparc/store/Folders.js | 51 ++++++++++--
.../client/source/class/osparc/store/Store.js | 6 ++
7 files changed, 155 insertions(+), 47 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
index 526f7032c27..0971a7d4990 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js
@@ -46,7 +46,8 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", {
"folderSelected": "qx.event.type.Data",
"folderUpdated": "qx.event.type.Data",
"moveFolderToRequested": "qx.event.type.Data",
- "deleteFolderRequested": "qx.event.type.Data"
+ "deleteFolderRequested": "qx.event.type.Data",
+ "changeContext": "qx.event.type.Data",
},
properties: {
@@ -186,19 +187,38 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", {
position: "bottom-right"
});
- const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12");
- editButton.addListener("execute", () => this.__editFolder(), this);
- menu.add(editButton);
+ const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext();
+ if (
+ studyBrowserContext === "search" ||
+ studyBrowserContext === "studiesAndFolders"
+ ) {
+ const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12");
+ editButton.addListener("execute", () => this.__editFolder(), this);
+ menu.add(editButton);
+
+ if (studyBrowserContext === "search") {
+ const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12");
+ openLocationButton.addListener("execute", () => {
+ const folder = this.getFolder();
+ this.fireDataEvent("changeContext", {
+ context: "studiesAndFolders",
+ workspaceId: folder.getWorkspaceId(),
+ folderId: folder.getParentFolderId(),
+ });
+ }, this);
+ menu.add(openLocationButton);
+ }
- const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12");
- moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this);
- menu.add(moveToButton);
+ const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12");
+ moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this);
+ menu.add(moveToButton);
- menu.addSeparator();
+ menu.addSeparator();
- const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12");
- deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this);
- menu.add(deleteButton);
+ const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12");
+ deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this);
+ menu.add(deleteButton);
+ }
menuButton.setMenu(menu);
},
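The hunk above makes the folder card's context menu depend on the study-browser context and, in the search context, adds an "Open location" entry that fires a `changeContext` event. A minimal sketch of the payload that action emits, in Python with illustrative attribute names (not the frontend code itself):

    # Sketch: the event payload carries enough for the browser to jump to
    # the folder's real location (its workspace and parent folder).
    def open_location_payload(folder) -> dict:
        return {
            "context": "studiesAndFolders",
            "workspaceId": folder.workspace_id,      # assumed attribute names
            "folderId": folder.parent_folder_id,
        }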
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
index 31524310535..a80672bd3cd 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
@@ -280,6 +280,14 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
const workspaceId = e.getData();
this._workspaceSelected(workspaceId);
}, this);
+ resourcesContainer.addListener("changeContext", e => {
+ const {
+ context,
+ workspaceId,
+ folderId,
+ } = e.getData();
+ this._changeContext(context, workspaceId, folderId);
+ }, this);
resourcesContainer.addListener("workspaceUpdated", e => this._workspaceUpdated(e.getData()));
resourcesContainer.addListener("deleteWorkspaceRequested", e => this._deleteWorkspaceRequested(e.getData()));
@@ -479,6 +487,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
throw new Error("Abstract method called!");
},
+ _changeContext: function(context, workspaceId, folderId) {
+ throw new Error("Abstract method called!");
+ },
+
_folderSelected: function(folderId) {
throw new Error("Abstract method called!");
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
index 187f6b441d3..b28b5d89a04 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js
@@ -79,6 +79,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
"workspaceSelected": "qx.event.type.Data",
"workspaceUpdated": "qx.event.type.Data",
"deleteWorkspaceRequested": "qx.event.type.Data",
+ "changeContext": "qx.event.type.Data",
},
statics: {
@@ -419,6 +420,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", {
"folderUpdated",
"moveFolderToRequested",
"deleteFolderRequested",
+ "changeContext",
].forEach(eName => card.addListener(eName, e => this.fireDataEvent(eName, e.getData())));
return card;
},
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
index 288290b06df..b82286a5f0c 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -171,17 +171,30 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
if (
!osparc.auth.Manager.getInstance().isLoggedIn() ||
!osparc.utils.DisabledPlugins.isFoldersEnabled() ||
- this.getCurrentContext() !== "studiesAndFolders" ||
+ this.getCurrentContext() === "workspaces" ||
this.__loadingFolders
) {
return;
}
- const workspaceId = this.getCurrentWorkspaceId();
- const folderId = this.getCurrentFolderId();
this.__loadingFolders = true;
+ let request = null;
+ switch (this.getCurrentContext()) {
+ case "search": {
+ const filterData = this._searchBarFilter.getFilterData();
+ const text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
+ request = osparc.store.Folders.getInstance().searchFolders(text, this.getOrderBy());
+ break;
+ }
+ case "studiesAndFolders": {
+ const workspaceId = this.getCurrentWorkspaceId();
+ const folderId = this.getCurrentFolderId();
+ request = osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy());
+ break;
+ }
+ }
this.__setFoldersToList([]);
- osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy())
+ request
.then(folders => {
this.__setFoldersToList(folders);
})
@@ -384,7 +397,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
_workspaceSelected: function(workspaceId) {
- this.__changeContext("studiesAndFolders", workspaceId, null);
+ this._changeContext("studiesAndFolders", workspaceId, null);
},
_workspaceUpdated: function() {
@@ -444,7 +457,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
_folderSelected: function(folderId) {
- this.__changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId);
+ this._changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId);
},
_folderUpdated: function() {
@@ -653,17 +666,23 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const requestParams = {};
requestParams.orderBy = JSON.stringify(this.getOrderBy());
- const filterData = this._searchBarFilter.getFilterData();
- // Use the ``search`` functionality only if the user types some text
- // tags should only be used to filter the current context (search context ot workspace/folder context)
- if (filterData.text) {
- requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
- requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : "";
- return requestParams;
+ switch (this.getCurrentContext()) {
+ case "studiesAndFolders":
+ requestParams.workspaceId = this.getCurrentWorkspaceId();
+ requestParams.folderId = this.getCurrentFolderId();
+ break;
+ case "search": {
+ // Use the ``search`` functionality only if the user types some text
+ // tags should only be used to filter the current context (search context or workspace/folder context)
+ const filterData = this._searchBarFilter.getFilterData();
+ if (filterData.text) {
+ requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid
+ requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : "";
+ }
+ break;
+ }
}
- requestParams.workspaceId = this.getCurrentWorkspaceId();
- requestParams.folderId = this.getCurrentFolderId();
return requestParams;
},
@@ -688,10 +707,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
resolveWResponse: true
};
- if ("text" in requestParams) {
- return osparc.data.Resources.fetch("studies", "getPageSearch", params, options);
+ let request = null;
+ switch (this.getCurrentContext()) {
+ case "search":
+ request = osparc.data.Resources.fetch("studies", "getPageSearch", params, options);
+ break;
+ case "studiesAndFolders":
+ request = osparc.data.Resources.fetch("studies", "getPage", params, options);
+ break;
}
- return osparc.data.Resources.fetch("studies", "getPage", params, options);
+ return request;
},
invalidateStudies: function() {
@@ -886,10 +911,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
this._resourcesContainer.addListener("changeSelection", e => {
+ const currentContext = this.getCurrentContext();
const selection = e.getData();
studiesMoveButton.set({
- visibility: selection.length ? "visible" : "excluded",
+ visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded",
label: selection.length > 1 ? this.tr("Move selected")+" ("+selection.length+")" : this.tr("Move")
});
@@ -910,7 +936,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
header.addListener("locationChanged", () => {
const workspaceId = header.getCurrentWorkspaceId();
const folderId = header.getCurrentFolderId();
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}, this);
const workspacesAndFoldersTree = this._resourceFilter.getWorkspacesAndFoldersTree();
@@ -918,27 +944,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const context = e.getData();
const workspaceId = context["workspaceId"];
if (workspaceId === -1) {
- this.__changeContext("workspaces");
+ this._changeContext("workspaces");
} else {
const folderId = context["folderId"];
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}
}, this);
this._searchBarFilter.addListener("filterChanged", e => {
const filterData = e.getData();
if (filterData.text) {
- this.__changeContext("search");
+ this._changeContext("search");
} else {
const workspaceId = this.getCurrentWorkspaceId();
const folderId = this.getCurrentFolderId();
- this.__changeContext("studiesAndFolders", workspaceId, folderId);
+ this._changeContext("studiesAndFolders", workspaceId, folderId);
}
});
}
},
- __changeContext: function(context, workspaceId = null, folderId = null) {
+ _changeContext: function(context, workspaceId = null, folderId = null) {
if (osparc.utils.DisabledPlugins.isFoldersEnabled()) {
if (
context !== "search" && // reload studies for a new search
@@ -950,6 +976,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return;
}
+ osparc.store.Store.getInstance().setStudyBrowserContext(context);
this.set({
currentContext: context,
currentWorkspaceId: workspaceId,
@@ -962,7 +989,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this._resourcesContainer.setResourcesToList([]);
if (context === "search") {
- this.__setFoldersToList([]);
+ this.__reloadFolders();
this.__reloadStudies();
} else if (context === "workspaces") {
this._searchBarFilter.resetFilters();
@@ -1342,7 +1369,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__getOpenLocationMenuButton: function(studyData) {
const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12");
openLocationButton.addListener("execute", () => {
- this.__changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]);
+ this._changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]);
}, this);
return openLocationButton;
},
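The StudyBrowser changes above replace the old "did the user type text?" check with an explicit dispatch on the current context, both for folders and for studies. A compact sketch of that selection logic (Python; the `store` helpers are hypothetical stand-ins for the osparc stores):

    # Sketch of the context dispatch introduced above (illustrative names).
    def build_folders_request(context: str, store, search_text: str | None):
        if context == "search":
            # name, description and uuid are matched server-side
            return store.search_folders(search_text or "")
        if context == "studiesAndFolders":
            return store.fetch_folders(store.current_folder_id, store.current_workspace_id)
        return None  # the "workspaces" context lists no folders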
diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js
index 5484107fd96..007ba33eddd 100644
--- a/services/static-webserver/client/source/class/osparc/data/Resources.js
+++ b/services/static-webserver/client/source/class/osparc/data/Resources.js
@@ -301,6 +301,11 @@ qx.Class.define("osparc.data.Resources", {
method: "GET",
url: statics.API + "/folders?workspace_id={workspaceId}&folder_id={folderId}&offset={offset}&limit={limit}&order_by={orderBy}"
},
+ getPageSearch: {
+ useCache: false,
+ method: "GET",
+ url: statics.API + "/folders:search?offset={offset}&limit={limit}&text={text}&order_by={orderBy}"
+ },
getOne: {
method: "GET",
url: statics.API + "/folders/{folderId}"
@@ -1368,7 +1373,7 @@ qx.Class.define("osparc.data.Resources", {
});
},
- getAllPages: function(resource, params = {}) {
+ getAllPages: function(resource, params = {}, endpoint = "getPage") {
return new Promise((resolve, reject) => {
let resources = [];
let offset = 0;
@@ -1377,7 +1382,6 @@ qx.Class.define("osparc.data.Resources", {
}
params["url"]["offset"] = offset;
params["url"]["limit"] = 10;
- const endpoint = "getPage";
const options = {
resolveWResponse: true
};
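The `getAllPages` change parameterizes the endpoint name so the new `getPageSearch` entry can reuse the same pagination loop. Roughly, the loop such a pager implements (Python sketch; `fetch_page` stands in for `osparc.data.Resources.fetch`, and the stop condition is a common convention since the actual loop body is not shown in this hunk):

    from typing import Any, Callable

    # Walk offset/limit pages until a short page signals the end.
    def get_all_pages(fetch_page: Callable[[int, int], list[Any]], limit: int = 10) -> list[Any]:
        resources: list[Any] = []
        offset = 0
        while True:
            page = fetch_page(offset, limit)
            resources.extend(page)
            if len(page) < limit:
                return resources
            offset += limit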
diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js
index 16385de935c..727896c28ef 100644
--- a/services/static-webserver/client/source/class/osparc/store/Folders.js
+++ b/services/static-webserver/client/source/class/osparc/store/Folders.js
@@ -31,6 +31,17 @@ qx.Class.define("osparc.store.Folders", {
"folderMoved": "qx.event.type.Data",
},
+ statics: {
+ curateOrderBy: function(orderBy) {
+ const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy);
+ if (curatedOrderBy.field !== "name") {
+ // only "modified_at" and "name" supported
+ curatedOrderBy.field = "modified_at";
+ }
+ return curatedOrderBy;
+ },
+ },
+
members: {
foldersCached: null,
@@ -40,7 +51,7 @@ qx.Class.define("osparc.store.Folders", {
orderBy = {
field: "modified_at",
direction: "desc"
- }
+ },
) {
if (osparc.auth.Data.getInstance().isGuest()) {
return new Promise(resolve => {
@@ -48,12 +59,7 @@ qx.Class.define("osparc.store.Folders", {
});
}
- const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy);
- if (curatedOrderBy.field !== "name") {
- // only "modified_at" and "name" supported
- curatedOrderBy.field = "modified_at";
- }
-
+ const curatedOrderBy = this.self().curateOrderBy(orderBy);
const params = {
url: {
workspaceId,
@@ -72,6 +78,37 @@ qx.Class.define("osparc.store.Folders", {
});
},
+ searchFolders: function(
+ text,
+ orderBy = {
+ field: "modified_at",
+ direction: "desc"
+ },
+ ) {
+ if (osparc.auth.Data.getInstance().isGuest()) {
+ return new Promise(resolve => {
+ resolve([]);
+ });
+ }
+
+ const curatedOrderBy = this.self().curateOrderBy(orderBy);
+ const params = {
+ url: {
+ text,
+ orderBy: JSON.stringify(curatedOrderBy),
+ }
+ };
+ return osparc.data.Resources.getInstance().getAllPages("folders", params, "getPageSearch")
+ .then(foldersData => {
+ const folders = [];
+ foldersData.forEach(folderData => {
+ const folder = this.__addToCache(folderData);
+ folders.push(folder);
+ });
+ return folders;
+ });
+ },
+
postFolder: function(name, parentFolderId = null, workspaceId = null) {
const newFolderData = {
name,
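Factoring `curateOrderBy` into a static lets `fetchFolders` and the new `searchFolders` share one normalization rule. The rule itself is tiny; in Python terms:

    import copy

    # Only "name" and "modified_at" are supported sort fields for folders;
    # anything else falls back to "modified_at". A deep copy avoids
    # mutating the caller's orderBy object.
    def curate_order_by(order_by: dict) -> dict:
        curated = copy.deepcopy(order_by)
        if curated.get("field") != "name":
            curated["field"] = "modified_at"
        return curated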
diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js
index 0e015ed7811..89ccc5e51a0 100644
--- a/services/static-webserver/client/source/class/osparc/store/Store.js
+++ b/services/static-webserver/client/source/class/osparc/store/Store.js
@@ -66,6 +66,12 @@ qx.Class.define("osparc.store.Store", {
init: null,
nullable: true
},
+ studyBrowserContext: {
+ check: ["studiesAndFolders", "workspaces", "search"],
+ init: "studiesAndFolders",
+ nullable: false,
+ event: "changeStudyBrowserContext",
+ },
studies: {
check: "Array",
init: []
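The new `studyBrowserContext` Store property acts as a globally readable, validated flag; cards such as FolderButtonItem consult it when building their menus. A Python sketch of the same validated-setter idea (illustrative only, not qooxdoo's property machinery):

    ALLOWED_CONTEXTS = ("studiesAndFolders", "workspaces", "search")

    class Store:
        def __init__(self) -> None:
            self._study_browser_context = "studiesAndFolders"  # init value

        @property
        def study_browser_context(self) -> str:
            return self._study_browser_context

        @study_browser_context.setter
        def study_browser_context(self, value: str) -> None:
            if value not in ALLOWED_CONTEXTS:  # mirrors the `check` list above
                raise ValueError(f"unsupported context: {value!r}")
            self._study_browser_context = value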
From d5dca964611fdf683e684c6eab61a329722a2388 Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Wed, 13 Nov 2024 15:16:03 +0100
Subject: [PATCH 12/17] =?UTF-8?q?=F0=9F=8E=A8=F0=9F=90=9B=20Enh/fix:=20fro?=
=?UTF-8?q?ntend=20knows=20about=20``trashedAt``=20(#6717)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../source/class/osparc/data/model/Folder.js | 9 ++++++++-
.../client/source/class/osparc/data/model/Study.js | 14 +++++++++++---
.../client/source/class/osparc/store/Folders.js | 2 ++
3 files changed, 21 insertions(+), 4 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Folder.js b/services/static-webserver/client/source/class/osparc/data/model/Folder.js
index 1dd99d015a2..b8b9eb03b21 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/Folder.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/Folder.js
@@ -37,6 +37,7 @@ qx.Class.define("osparc.data.model.Folder", {
owner: folderData.owner,
createdAt: new Date(folderData.createdAt),
lastModified: new Date(folderData.modifiedAt),
+ trashedAt: folderData.trashedAt ? new Date(folderData.trashedAt) : this.getTrashedAt(),
});
},
@@ -95,7 +96,13 @@ qx.Class.define("osparc.data.model.Folder", {
nullable: true,
init: null,
event: "changeLastModified"
- }
+ },
+
+ trashedAt: {
+ check: "Date",
+ nullable: true,
+ init: null,
+ },
},
statics: {
diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js
index 598e0575d22..ab178aca669 100644
--- a/services/static-webserver/client/source/class/osparc/data/model/Study.js
+++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js
@@ -58,7 +58,8 @@ qx.Class.define("osparc.data.model.Study", {
state: studyData.state || this.getState(),
quality: studyData.quality || this.getQuality(),
permalink: studyData.permalink || this.getPermalink(),
- dev: studyData.dev || this.getDev()
+ dev: studyData.dev || this.getDev(),
+ trashedAt: studyData.trashedAt ? new Date(studyData.trashedAt) : this.getTrashedAt(),
});
const wbData = studyData.workbench || this.getWorkbench();
@@ -209,7 +210,13 @@ qx.Class.define("osparc.data.model.Study", {
nullable: true,
event: "changeReadOnly",
init: true
- }
+ },
+
+ trashedAt: {
+ check: "Date",
+ nullable: true,
+ init: null,
+ },
// ------ ignore for serializing ------
},
@@ -218,7 +225,8 @@ qx.Class.define("osparc.data.model.Study", {
"permalink",
"state",
"pipelineRunning",
- "readOnly"
+ "readOnly",
+ "trashedAt",
],
IgnoreModelizationProps: [
diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js
index 727896c28ef..7deb66618bb 100644
--- a/services/static-webserver/client/source/class/osparc/store/Folders.js
+++ b/services/static-webserver/client/source/class/osparc/store/Folders.js
@@ -178,6 +178,8 @@ qx.Class.define("osparc.store.Folders", {
folder.set("createdAt", new Date(folderData["createdAt"]));
} else if (key === "modifiedAt") {
folder.set("lastModified", new Date(folderData["modifiedAt"]));
+ } else if (key === "trashedAt") {
+ folder.set("trashedAt", new Date(folderData["trashedAt"]));
} else {
folder.set(key, folderData[key]);
}
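Both models now handle `trashedAt` defensively: convert to a Date only when the backend actually sent a value, otherwise keep the current (possibly null) property. The guard, in Python terms (sketch; ISO-8601 timestamps assumed):

    from datetime import datetime

    # A missing/empty value leaves the model's current trashedAt untouched.
    def updated_trashed_at(current: datetime | None, raw: str | None) -> datetime | None:
        return datetime.fromisoformat(raw) if raw else current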
From e6e2c705c8e4ac4a6b62a73668ef57f720f55284 Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Wed, 13 Nov 2024 16:12:24 +0100
Subject: [PATCH 13/17] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20TIP:=20New=20?=
=?UTF-8?q?plan=20after=20creating=20its=20template=20(#6710)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../class/osparc/dashboard/StudyBrowser.js | 45 ++++++++++---------
.../source/class/osparc/info/StudyLarge.js | 4 ++
2 files changed, 28 insertions(+), 21 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
index b82286a5f0c..ceaee03b3ac 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -746,7 +746,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
break;
case "tis":
case "tiplite":
- this.__addTIPPlusButtons();
+ this.__addTIPPlusButton();
break;
case "s4l":
case "s4lacad":
@@ -770,24 +770,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this._resourcesContainer.addNonResourceCard(newStudyBtn);
},
- __addTIPPlusButtons: function() {
- osparc.data.Resources.get("templates")
- .then(templates => {
- if (templates) {
- osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json")
- .then(newStudiesData => {
- const product = osparc.product.Utils.getProductName()
- if (product in newStudiesData) {
- const mode = this._resourcesContainer.getMode();
- const title = this.tr("New Plan");
- const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title);
- newStudyBtn.setCardKey("new-study");
- newStudyBtn.subscribeToFilterGroup("searchBarFilter");
- osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn");
- this._resourcesContainer.addNonResourceCard(newStudyBtn);
- newStudyBtn.addListener("execute", () => {
- newStudyBtn.setValue(false);
+ __addTIPPlusButton: function() {
+ const mode = this._resourcesContainer.getMode();
+ const title = this.tr("New Plan");
+ const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title);
+ newStudyBtn.setCardKey("new-study");
+ newStudyBtn.subscribeToFilterGroup("searchBarFilter");
+ osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn");
+ this._resourcesContainer.addNonResourceCard(newStudyBtn);
+ newStudyBtn.setEnabled(false);
+ osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json")
+ .then(newStudiesData => {
+ const product = osparc.product.Utils.getProductName();
+ if (product in newStudiesData) {
+ newStudyBtn.setEnabled(true);
+
+ newStudyBtn.addListener("execute", () => {
+ newStudyBtn.setValue(false);
+ osparc.data.Resources.get("templates")
+ .then(templates => {
+ if (templates) {
const newStudies = new osparc.dashboard.NewStudies(newStudiesData[product]);
newStudies.addListener("templatesLoaded", () => {
newStudies.setGroupBy("category");
@@ -806,9 +809,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow");
});
- });
- }
- });
+ }
+ });
+ });
}
});
},
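The restructured `__addTIPPlusButton` creates the button immediately (disabled), enables it once the product shows up in `new_studies.json`, and defers the templates request to click time, so a template created after the dashboard loaded is still picked up. The flow, as an async Python sketch where `ui` and `resources` are hypothetical stand-ins for the dashboard widgets and data store:

    # Illustrative sketch of the new flow, not the frontend code itself.
    async def add_tip_plus_button(ui, resources, product: str) -> None:
        button = ui.new_study_button("New Plan")
        button.enabled = False
        new_studies = await resources.fetch_json("/resource/osparc/new_studies.json")
        if product in new_studies:
            button.enabled = True

            async def on_execute() -> None:
                templates = await resources.get("templates")  # fetched on demand
                if templates:
                    ui.open_new_studies_window(new_studies[product])

            button.on_execute = on_execute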
diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
index 3351ed0fc96..5709bfd70a2 100644
--- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
+++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js
@@ -324,6 +324,10 @@ qx.Class.define("osparc.info.StudyLarge", {
studyData["resourceType"] = this.__isTemplate ? "template" : "study";
this.fireDataEvent("updateStudy", studyData);
qx.event.message.Bus.getInstance().dispatchByName("updateStudy", studyData);
+ if (this.__isTemplate) {
+ // reload templates
+ osparc.data.Resources.get("templates", {}, false);
+ }
})
.catch(err => {
console.error(err);
From a4b7c7a67dea15a7bc40efb8afb08a93b81c0186 Mon Sep 17 00:00:00 2001
From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Wed, 13 Nov 2024 18:54:09 +0100
Subject: [PATCH 14/17] =?UTF-8?q?=F0=9F=90=9B=20Fix=20listing=20folders=20?=
=?UTF-8?q?in=20workspace=20(#6718)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
---
.../folders/_folders_db.py | 6 ++
...t_workspaces__folders_and_projects_crud.py | 95 +++++++++++++++++++
2 files changed, 101 insertions(+)
diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
index f4e27fa3a7a..e2992d111ee 100644
--- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
+++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py
@@ -178,6 +178,12 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches
& (folders_v2.c.user_id.is_(None))
)
)
+
+ if workspace_query.workspace_scope == WorkspaceScope.SHARED:
+ shared_workspace_query = shared_workspace_query.where(
+ folders_v2.c.workspace_id == workspace_query.workspace_id
+ )
+
else:
shared_workspace_query = None
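The fix above constrains the shared-workspace branch of the folders listing to the requested workspace; without it, folders from every shared workspace the user can access leaked into the result. The essential clause, as a self-contained SQLAlchemy sketch (table definition simplified to the columns the diff touches):

    import sqlalchemy as sa

    folders_v2 = sa.table("folders_v2", sa.column("workspace_id"), sa.column("user_id"))

    def scope_shared_query(query: sa.Select, workspace_id: int) -> sa.Select:
        # keep only folders of the requested shared workspace
        return query.where(folders_v2.c.workspace_id == workspace_id)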
diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
index c95aebe6fdd..717de9303fd 100644
--- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
@@ -365,3 +365,98 @@ async def test_workspaces_delete_folders(
resp = await client.get(url)
data, _ = await assert_status(resp, status.HTTP_200_OK)
assert len(data) == 0
+
+
+@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)])
+async def test_listing_folders_and_projects_in_workspace__multiple_workspaces_created(
+ client: TestClient,
+ logged_user: UserInfoDict,
+ user_project: ProjectDict,
+ expected: HTTPStatus,
+ mock_catalog_api_get_services_for_user_in_product: MockerFixture,
+ fake_project: ProjectDict,
+ workspaces_clean_db: None,
+):
+ assert client.app
+
+ # create a new workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace_1, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # Create project in workspace
+ project_data = deepcopy(fake_project)
+ project_data["workspace_id"] = f"{added_workspace_1['workspaceId']}"
+ project = await create_project(
+ client.app,
+ project_data,
+ user_id=logged_user["id"],
+ product_name="osparc",
+ )
+
+ # Create folder in workspace
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "Original user folder",
+ "workspaceId": f"{added_workspace_1['workspaceId']}",
+ },
+ )
+ first_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # create a second workspace
+ url = client.app.router["create_workspace"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "My first workspace",
+ "description": "Custom description",
+ "thumbnail": None,
+ },
+ )
+ added_workspace_2, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # Create project in workspace
+ project_data = deepcopy(fake_project)
+ project_data["workspace_id"] = f"{added_workspace_2['workspaceId']}"
+ project = await create_project(
+ client.app,
+ project_data,
+ user_id=logged_user["id"],
+ product_name="osparc",
+ )
+
+ # Create folder in workspace
+ url = client.app.router["create_folder"].url_for()
+ resp = await client.post(
+ url.path,
+ json={
+ "name": "Original user folder",
+ "workspaceId": f"{added_workspace_2['workspaceId']}",
+ },
+ )
+ first_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
+
+ # List projects in workspace 1
+ base_url = client.app.router["list_projects"].url_for()
+ url = base_url.with_query({"workspace_id": f"{added_workspace_1['workspaceId']}"})
+ resp = await client.get(url)
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
+
+ # List folders in workspace 1
+ base_url = client.app.router["list_folders"].url_for()
+ url = base_url.with_query(
+ {"workspace_id": f"{added_workspace_1['workspaceId']}", "folder_id": "null"}
+ )
+ resp = await client.get(url)
+ data, _ = await assert_status(resp, status.HTTP_200_OK)
+ assert len(data) == 1
From a44de5c91dc8a81bd9f43ea40f1bfbb77fd6c1d4 Mon Sep 17 00:00:00 2001
From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com>
Date: Thu, 14 Nov 2024 09:40:21 +0100
Subject: [PATCH 15/17] =?UTF-8?q?=E2=9C=A8=20instrument=20(opentelemetry)?=
=?UTF-8?q?=20httpx=20clients=20(#6715)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
packages/aws-library/requirements/_base.txt | 2 --
.../requirements/_base.txt | 27 +++++++++++++++
.../requirements/_test.txt | 3 +-
.../requirements/_tools.txt | 3 +-
.../service-library/requirements/_base.txt | 2 --
.../service-library/requirements/_fastapi.in | 1 +
.../service-library/requirements/_fastapi.txt | 6 ++++
.../servicelib/fastapi/http_client_thin.py | 8 ++++-
.../src/servicelib/fastapi/tracing.py | 6 ++++
.../tests/fastapi/test_http_client_thin.py | 30 +++++++++++-----
packages/simcore-sdk/requirements/_base.txt | 1 -
services/agent/requirements/_base.txt | 8 +++--
services/api-server/requirements/_base.txt | 7 +++-
.../core/application.py | 29 ++++++++++++----
.../services/catalog.py | 11 ++++--
.../services/director_v2.py | 6 +++-
.../services/storage.py | 11 ++++--
.../services/webserver.py | 13 +++++--
.../utils/client_base.py | 6 +++-
.../tests/unit/test_utils_client_base.py | 1 +
services/autoscaling/requirements/_base.txt | 8 +++--
services/autoscaling/requirements/_test.txt | 4 ---
services/catalog/requirements/_base.txt | 10 ++++--
.../core/application.py | 9 +++--
.../simcore_service_catalog/core/events.py | 7 ++--
.../services/director.py | 24 ++++++++++---
.../clusters-keeper/requirements/_base.txt | 8 +++--
.../clusters-keeper/requirements/_test.txt | 4 ---
services/dask-sidecar/requirements/_base.txt | 2 --
.../datcore-adapter/requirements/_base.txt | 8 +++--
services/director-v2/requirements/_base.txt | 7 +++-
.../cli/_client.py | 4 ++-
.../simcore_service_director_v2/cli/_core.py | 7 ++--
.../core/application.py | 25 ++++++++++----
.../modules/catalog.py | 27 ++++++++++-----
.../modules/director_v0.py | 34 ++++++++++++-------
.../modules/dynamic_services.py | 13 ++++---
.../dynamic_sidecar/api_client/_thin.py | 5 +++
.../modules/resource_usage_tracker_client.py | 3 ++
.../modules/storage.py | 26 +++++++++-----
...t_dynamic_sidecar_nodeports_integration.py | 11 ++++--
.../dynamic-scheduler/requirements/_base.txt | 10 ++++--
.../services/director_v2/_thin_client.py | 1 +
.../dynamic-sidecar/requirements/_base.txt | 7 +++-
services/efs-guardian/requirements/_base.txt | 10 ++++--
services/efs-guardian/requirements/_test.txt | 4 ---
services/invitations/requirements/_base.txt | 8 +++--
services/payments/requirements/_base.txt | 10 ++++--
.../services/payments_gateway.py | 3 ++
.../services/resource_usage_tracker.py | 3 ++
.../services/stripe.py | 3 ++
.../requirements/_base.txt | 10 ++++--
.../requirements/_test.txt | 4 ---
services/storage/requirements/_base.txt | 1 -
services/storage/requirements/_test.txt | 4 ---
services/web/server/requirements/_base.txt | 1 -
services/web/server/requirements/_test.txt | 1 -
tests/swarm-deploy/requirements/_test.txt | 27 ++++++++++++++-
58 files changed, 395 insertions(+), 139 deletions(-)
diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt
index 63c88ba0037..6caf09a9844 100644
--- a/packages/aws-library/requirements/_base.txt
+++ b/packages/aws-library/requirements/_base.txt
@@ -44,8 +44,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/_base.in
# -r requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt
index abc242615c5..634746a1298 100644
--- a/packages/notifications-library/requirements/_base.txt
+++ b/packages/notifications-library/requirements/_base.txt
@@ -16,6 +16,10 @@ attrs==24.2.0
# referencing
click==8.1.7
# via typer
+deprecated==1.2.14
+ # via
+ # opentelemetry-api
+ # opentelemetry-semantic-conventions
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
@@ -26,6 +30,8 @@ idna==3.10
# via
# email-validator
# yarl
+importlib-metadata==8.5.0
+ # via opentelemetry-api
jinja2==3.1.4
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -54,6 +60,19 @@ mdurl==0.1.2
# via markdown-it-py
multidict==6.1.0
# via yarl
+opentelemetry-api==1.28.1
+ # via
+ # opentelemetry-instrumentation
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-semantic-conventions
+opentelemetry-instrumentation==0.49b1
+ # via opentelemetry-instrumentation-asyncpg
+opentelemetry-instrumentation-asyncpg==0.49b1
+ # via -r requirements/../../../packages/postgres-database/requirements/_base.in
+opentelemetry-semantic-conventions==0.49b1
+ # via
+ # opentelemetry-instrumentation
+ # opentelemetry-instrumentation-asyncpg
orjson==3.10.7
# via
# -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt
@@ -61,6 +80,8 @@ orjson==3.10.7
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
# -c requirements/../../../requirements/constraints.txt
# -r requirements/../../../packages/models-library/requirements/_base.in
+packaging==24.2
+ # via opentelemetry-instrumentation
psycopg2-binary==2.9.9
# via sqlalchemy
pydantic==1.10.18
@@ -109,5 +130,11 @@ typing-extensions==4.12.2
# alembic
# pydantic
# typer
+wrapt==1.16.0
+ # via
+ # deprecated
+ # opentelemetry-instrumentation
yarl==1.12.1
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
+zipp==3.21.0
+ # via importlib-metadata
diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt
index 55a7d9b8ee8..e802554a901 100644
--- a/packages/notifications-library/requirements/_test.txt
+++ b/packages/notifications-library/requirements/_test.txt
@@ -28,8 +28,9 @@ mypy==1.12.0
# via sqlalchemy
mypy-extensions==1.0.0
# via mypy
-packaging==24.1
+packaging==24.2
# via
+ # -c requirements/_base.txt
# pytest
# pytest-sugar
pluggy==1.5.0
diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt
index 217752d687f..4a902da9cb2 100644
--- a/packages/notifications-library/requirements/_tools.txt
+++ b/packages/notifications-library/requirements/_tools.txt
@@ -38,8 +38,9 @@ mypy-extensions==1.0.0
# mypy
nodeenv==1.9.1
# via pre-commit
-packaging==24.1
+packaging==24.2
# via
+ # -c requirements/_base.txt
# -c requirements/_test.txt
# black
# build
diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt
index d53ce73a8c4..696dc496fcf 100644
--- a/packages/service-library/requirements/_base.txt
+++ b/packages/service-library/requirements/_base.txt
@@ -28,8 +28,6 @@ arrow==1.3.0
# via
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
diff --git a/packages/service-library/requirements/_fastapi.in b/packages/service-library/requirements/_fastapi.in
index 7b6a6bb2cf2..e11871af331 100644
--- a/packages/service-library/requirements/_fastapi.in
+++ b/packages/service-library/requirements/_fastapi.in
@@ -9,6 +9,7 @@
fastapi
httpx
opentelemetry-instrumentation-fastapi
+opentelemetry-instrumentation-httpx
prometheus-client
prometheus-fastapi-instrumentator
uvicorn
diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt
index 8a3aed37600..71c9d7cabce 100644
--- a/packages/service-library/requirements/_fastapi.txt
+++ b/packages/service-library/requirements/_fastapi.txt
@@ -47,23 +47,29 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-semantic-conventions
opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
opentelemetry-instrumentation-asgi==0.48b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/_fastapi.in
opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
prometheus-client==0.21.0
# via
# -r requirements/_fastapi.in
diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
index e00e0d636a2..554ccb450ad 100644
--- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py
+++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py
@@ -8,6 +8,8 @@
from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response
from httpx._types import TimeoutTypes, URLTypes
from pydantic.errors import PydanticErrorMixin
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from tenacity import RetryCallState
from tenacity.asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
@@ -201,6 +203,7 @@ def __init__(
base_url: URLTypes | None = None,
default_http_client_timeout: TimeoutTypes | None = None,
extra_allowed_method_names: set[str] | None = None,
+ tracing_settings: TracingSettings | None,
) -> None:
_assert_public_interface(self, extra_allowed_method_names)
@@ -220,7 +223,10 @@ def __init__(
if default_http_client_timeout:
client_args["timeout"] = default_http_client_timeout
- super().__init__(client=AsyncClient(**client_args))
+ client = AsyncClient(**client_args)
+ if tracing_settings:
+ setup_httpx_client_tracing(client)
+ super().__init__(client=client)
async def __aenter__(self):
await self.setup_client()
diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py b/packages/service-library/src/servicelib/fastapi/tracing.py
index b5179a8a5f6..36e9b06fa12 100644
--- a/packages/service-library/src/servicelib/fastapi/tracing.py
+++ b/packages/service-library/src/servicelib/fastapi/tracing.py
@@ -5,11 +5,13 @@
import logging
from fastapi import FastAPI
+from httpx import AsyncClient, Client
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
OTLPSpanExporter as OTLPSpanExporterHTTP,
)
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
+from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
@@ -121,3 +123,7 @@ def setup_tracing(
msg="Attempting to add requests opentelemetry autoinstrumentation...",
):
RequestsInstrumentor().instrument()
+
+
+def setup_httpx_client_tracing(client: AsyncClient | Client) -> None:
+ HTTPXClientInstrumentor.instrument_client(client)
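`setup_httpx_client_tracing` instruments a single httpx client rather than all clients globally. A usage sketch mirroring the pattern the thin-client hunk above applies (the `tracing_enabled` flag stands in for the optional `TracingSettings`):

    from httpx import AsyncClient
    from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor

    def make_traced_client(base_url: str, *, tracing_enabled: bool) -> AsyncClient:
        client = AsyncClient(base_url=base_url)
        if tracing_enabled:
            HTTPXClientInstrumentor.instrument_client(client)  # per-client spans
        return client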
diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py
index f98de720c33..8c052948f6d 100644
--- a/packages/service-library/tests/fastapi/test_http_client_thin.py
+++ b/packages/service-library/tests/fastapi/test_http_client_thin.py
@@ -71,7 +71,9 @@ def request_timeout() -> int:
@pytest.fixture
async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]:
- async with FakeThickClient(total_retry_interval=request_timeout) as client:
+ async with FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ ) as client:
yield client
@@ -95,7 +97,9 @@ async def test_retry_on_errors(
test_url: AnyHttpUrl,
caplog_info_level: pytest.LogCaptureFixture,
) -> None:
- client = FakeThickClient(total_retry_interval=request_timeout)
+ client = FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
with pytest.raises(ClientHttpError):
await client.get_provided_url(test_url)
@@ -119,7 +123,7 @@ async def raises_request_error(self) -> Response:
request=Request(method="GET", url=test_url),
)
- client = ATestClient(total_retry_interval=request_timeout)
+ client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None)
with pytest.raises(ClientHttpError):
await client.raises_request_error()
@@ -145,7 +149,7 @@ async def raises_http_error(self) -> Response:
msg = "mock_http_error"
raise HTTPError(msg)
- client = ATestClient(total_retry_interval=request_timeout)
+ client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None)
with pytest.raises(ClientHttpError):
await client.raises_http_error()
@@ -159,21 +163,25 @@ async def public_method_ok(self) -> Response: # type: ignore
"""this method will be ok even if no code is used"""
# OK
- OKTestClient(total_retry_interval=request_timeout)
+ OKTestClient(total_retry_interval=request_timeout, tracing_settings=None)
class FailWrongAnnotationTestClient(BaseThinClient):
async def public_method_wrong_annotation(self) -> None:
"""this method will raise an error"""
with pytest.raises(AssertionError, match="should return an instance"):
- FailWrongAnnotationTestClient(total_retry_interval=request_timeout)
+ FailWrongAnnotationTestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
class FailNoAnnotationTestClient(BaseThinClient):
async def public_method_no_annotation(self):
"""this method will raise an error"""
with pytest.raises(AssertionError, match="should return an instance"):
- FailNoAnnotationTestClient(total_retry_interval=request_timeout)
+ FailNoAnnotationTestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
async def test_expect_state_decorator(
@@ -197,7 +205,9 @@ async def get_wrong_state(self) -> Response:
respx_mock.get(url_get_200_ok).mock(return_value=Response(codes.OK))
respx_mock.get(get_wrong_state).mock(return_value=Response(codes.OK))
- test_client = ATestClient(total_retry_interval=request_timeout)
+ test_client = ATestClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
# OK
response = await test_client.get_200_ok()
@@ -218,7 +228,9 @@ async def test_retry_timeout_overwrite(
request_timeout: int,
caplog_info_level: pytest.LogCaptureFixture,
) -> None:
- client = FakeThickClient(total_retry_interval=request_timeout)
+ client = FakeThickClient(
+ total_retry_interval=request_timeout, tracing_settings=None
+ )
caplog_info_level.clear()
start = arrow.utcnow()
diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt
index 5eac02fa1ec..11be2af08e1 100644
--- a/packages/simcore-sdk/requirements/_base.txt
+++ b/packages/simcore-sdk/requirements/_base.txt
@@ -48,7 +48,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==24.2.0
diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt
index 59f29515fe5..a42027b8a00 100644
--- a/services/agent/requirements/_base.txt
+++ b/services/agent/requirements/_base.txt
@@ -38,8 +38,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==24.2.0
# via
# aiohttp
@@ -143,6 +141,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -161,12 +160,15 @@ opentelemetry-instrumentation==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -185,6 +187,7 @@ opentelemetry-semantic-conventions==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -192,6 +195,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.7
# via
diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt
index 92a441a0e25..02a3778eab2 100644
--- a/services/api-server/requirements/_base.txt
+++ b/services/api-server/requirements/_base.txt
@@ -74,7 +74,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -283,6 +282,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -306,6 +306,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -320,6 +321,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -345,6 +348,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -352,6 +356,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py
index 04dcd397c28..3d67746deb7 100644
--- a/services/api-server/src/simcore_service_api_server/core/application.py
+++ b/services/api-server/src/simcore_service_api_server/core/application.py
@@ -82,19 +82,36 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI:
setup_rabbitmq(app)
+ if settings.API_SERVER_TRACING:
+ setup_tracing(app, settings.API_SERVER_TRACING, APP_NAME)
+
if settings.API_SERVER_WEBSERVER:
- webserver.setup(app, settings.API_SERVER_WEBSERVER)
- if app.state.settings.API_SERVER_TRACING:
- setup_tracing(app, app.state.settings.API_SERVER_TRACING, APP_NAME)
+ webserver.setup(
+ app,
+ settings.API_SERVER_WEBSERVER,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_CATALOG:
- catalog.setup(app, settings.API_SERVER_CATALOG)
+ catalog.setup(
+ app,
+ settings.API_SERVER_CATALOG,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_STORAGE:
- storage.setup(app, settings.API_SERVER_STORAGE)
+ storage.setup(
+ app,
+ settings.API_SERVER_STORAGE,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
if settings.API_SERVER_DIRECTOR_V2:
- director_v2.setup(app, settings.API_SERVER_DIRECTOR_V2)
+ director_v2.setup(
+ app,
+ settings.API_SERVER_DIRECTOR_V2,
+ tracing_settings=settings.API_SERVER_TRACING,
+ )
# setup app
app.add_event_handler("startup", create_start_app_handler(app))
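The api-server now initializes tracing once, up front, and threads the same (possibly None) `TracingSettings` into every sub-service setup instead of each setup reaching into `app.state` later. The fan-out pattern, sketched with illustrative names:

    from typing import Any, Callable

    # Each service is optional; every enabled one receives the shared
    # tracing settings so it can instrument its own httpx client.
    def wire_services(
        app: Any,
        tracing_settings: Any | None,
        setups: list[tuple[Callable[..., None], Any | None]],
    ) -> None:
        for setup, service_settings in setups:
            if service_settings:
                setup(app, service_settings, tracing_settings=tracing_settings)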
diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py
index 56a7d648790..461237ce998 100644
--- a/services/api-server/src/simcore_service_api_server/services/catalog.py
+++ b/services/api-server/src/simcore_service_api_server/services/catalog.py
@@ -11,6 +11,7 @@
from models_library.services import ServiceMetaDataPublished, ServiceType
from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as
from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings
from simcore_service_api_server.exceptions.backend_errors import (
ListSolversOrStudiesError,
SolverOrStudyNotFoundError,
@@ -209,10 +210,16 @@ async def get_latest_release(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: CatalogSettings) -> None:
+def setup(
+ app: FastAPI, settings: CatalogSettings, tracing_settings: TracingSettings | None
+) -> None:
if not settings:
settings = CatalogSettings()
setup_client_instance(
- app, CatalogApi, api_baseurl=settings.api_base_url, service_name="catalog"
+ app,
+ CatalogApi,
+ api_baseurl=settings.api_base_url,
+ service_name="catalog",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py
index ff31490b072..938e36c5242 100644
--- a/services/api-server/src/simcore_service_api_server/services/director_v2.py
+++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py
@@ -9,6 +9,7 @@
from models_library.projects_pipeline import ComputationTask
from models_library.projects_state import RunningState
from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, PositiveInt, parse_raw_as
+from settings_library.tracing import TracingSettings
from simcore_service_api_server.exceptions.backend_errors import (
JobNotFoundError,
LogFileNotFoundError,
@@ -191,11 +192,14 @@ async def get_computation_logs(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: DirectorV2Settings) -> None:
+def setup(
+ app: FastAPI, settings: DirectorV2Settings, tracing_settings: TracingSettings | None
+) -> None:
setup_client_instance(
app,
DirectorV2Api,
# WARNING: it has /v0 and /v2 prefixes
api_baseurl=settings.base_url,
service_name="director_v2",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/storage.py b/services/api-server/src/simcore_service_api_server/services/storage.py
index 13920d8a931..4e6d8be54ca 100644
--- a/services/api-server/src/simcore_service_api_server/services/storage.py
+++ b/services/api-server/src/simcore_service_api_server/services/storage.py
@@ -14,6 +14,7 @@
from models_library.basic_types import SHA256Str
from models_library.generics import Envelope
from pydantic import AnyUrl, PositiveInt
+from settings_library.tracing import TracingSettings
from starlette.datastructures import URL
from ..core.settings import StorageSettings
@@ -209,12 +210,18 @@ async def create_soft_link(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: StorageSettings) -> None:
+def setup(
+ app: FastAPI, settings: StorageSettings, tracing_settings: TracingSettings | None
+) -> None:
if not settings:
settings = StorageSettings()
setup_client_instance(
- app, StorageApi, api_baseurl=settings.api_base_url, service_name="storage"
+ app,
+ StorageApi,
+ api_baseurl=settings.api_base_url,
+ service_name="storage",
+ tracing_settings=tracing_settings,
)
diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py
index 0d265248dc2..19688728cb5 100644
--- a/services/api-server/src/simcore_service_api_server/services/webserver.py
+++ b/services/api-server/src/simcore_service_api_server/services/webserver.py
@@ -48,6 +48,7 @@
X_SIMCORE_PARENT_NODE_ID,
X_SIMCORE_PARENT_PROJECT_UUID,
)
+from settings_library.tracing import TracingSettings
from simcore_service_api_server.exceptions.backend_errors import (
ConfigurationError,
ForbiddenWalletError,
@@ -588,24 +589,30 @@ async def get_service_pricing_plan(
# MODULES APP SETUP -------------------------------------------------------------
-def setup(app: FastAPI, settings: WebServerSettings) -> None:
+def setup(
+ app: FastAPI,
+ webserver_settings: WebServerSettings,
+ tracing_settings: TracingSettings | None,
+) -> None:
setup_client_instance(
app,
WebserverApi,
- api_baseurl=settings.api_base_url,
+ api_baseurl=webserver_settings.api_base_url,
service_name="webserver",
+ tracing_settings=tracing_settings,
)
setup_client_instance(
app,
LongRunningTasksClient,
api_baseurl="",
service_name="long_running_tasks_client",
+ tracing_settings=tracing_settings,
)
def _on_startup() -> None:
# normalize & encrypt
- secret_key = settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
+ secret_key = webserver_settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
app.state.webserver_fernet = fernet.Fernet(secret_key)
async def _on_shutdown() -> None:
diff --git a/services/api-server/src/simcore_service_api_server/utils/client_base.py b/services/api-server/src/simcore_service_api_server/utils/client_base.py
index ed58f7429e3..3cc35a74bb6 100644
--- a/services/api-server/src/simcore_service_api_server/utils/client_base.py
+++ b/services/api-server/src/simcore_service_api_server/utils/client_base.py
@@ -4,6 +4,8 @@
import httpx
from fastapi import FastAPI
from httpx import AsyncClient
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from .app_data import AppDataMixin
@@ -43,14 +45,16 @@ def setup_client_instance(
api_cls: type[BaseServiceClientApi],
api_baseurl,
service_name: str,
+ tracing_settings: TracingSettings | None,
**extra_fields,
) -> None:
"""Helper to add init/cleanup of ServiceClientApi instances in the app lifespam"""
assert issubclass(api_cls, BaseServiceClientApi) # nosec
-
# NOTE: this term is mocked in tests. If you need to modify pay attention to the mock
client = AsyncClient(base_url=api_baseurl)
+ if tracing_settings:
+ setup_httpx_client_tracing(client)
# events
def _create_instance() -> None:
diff --git a/services/api-server/tests/unit/test_utils_client_base.py b/services/api-server/tests/unit/test_utils_client_base.py
index 61370a8ea52..9fe2da1a28c 100644
--- a/services/api-server/tests/unit/test_utils_client_base.py
+++ b/services/api-server/tests/unit/test_utils_client_base.py
@@ -43,6 +43,7 @@ class TheClientApi(BaseServiceClientApi):
service_name="the_service",
health_check_path="/health",
x=42,
+ tracing_settings=None,
)
assert not TheClientApi.get_instance(app)
diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt
index 0c7ff77b07f..995fb44e3f4 100644
--- a/services/autoscaling/requirements/_base.txt
+++ b/services/autoscaling/requirements/_base.txt
@@ -65,8 +65,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -260,6 +258,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -282,6 +281,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -290,6 +290,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -316,6 +318,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -323,6 +326,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.3
# via
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 8abc686eb76..47379c4d69f 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -6,10 +6,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt
index 890adbe5508..e650830f05d 100644
--- a/services/catalog/requirements/_base.txt
+++ b/services/catalog/requirements/_base.txt
@@ -41,9 +41,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via
# -r requirements/_base.in
@@ -191,6 +189,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -210,6 +209,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -218,6 +218,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -237,6 +239,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -244,6 +247,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py
index a28dc8c5a32..94f35b3d1ea 100644
--- a/services/catalog/src/simcore_service_catalog/core/application.py
+++ b/services/catalog/src/simcore_service_catalog/core/application.py
@@ -46,8 +46,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
# STATE
app.state.settings = settings
+ if settings.CATALOG_TRACING:
+ setup_tracing(app, settings.CATALOG_TRACING, APP_NAME)
+
# STARTUP-EVENT
- app.add_event_handler("startup", create_on_startup(app))
+ app.add_event_handler(
+ "startup", create_on_startup(app, tracing_settings=settings.CATALOG_TRACING)
+ )
# PLUGIN SETUP
setup_function_services(app)
@@ -65,8 +70,6 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
app.add_middleware(
BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header
)
- if app.state.settings.CATALOG_TRACING:
- setup_tracing(app, app.state.settings.CATALOG_TRACING, APP_NAME)
app.add_middleware(GZipMiddleware)
diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py
index f22adbba4ec..dde295a2e56 100644
--- a/services/catalog/src/simcore_service_catalog/core/events.py
+++ b/services/catalog/src/simcore_service_catalog/core/events.py
@@ -5,6 +5,7 @@
from fastapi import FastAPI
from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings
from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG
from ..db.events import setup_default_product
@@ -26,7 +27,9 @@ def _flush_finished_banner() -> None:
print(APP_FINISHED_BANNER_MSG, flush=True) # noqa: T201
-def create_on_startup(app: FastAPI) -> EventCallable:
+def create_on_startup(
+ app: FastAPI, tracing_settings: TracingSettings | None
+) -> EventCallable:
async def _() -> None:
_flush_started_banner()
@@ -37,7 +40,7 @@ async def _() -> None:
if app.state.settings.CATALOG_DIRECTOR:
# setup connection to director
- await setup_director(app)
+ await setup_director(app, tracing_settings=tracing_settings)
# FIXME: check director service is in place and ready. Hand-shake??
# SEE https://github.com/ITISFoundation/osparc-simcore/issues/1728
diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py
index 7c6925902f4..e97b72bb3f2 100644
--- a/services/catalog/src/simcore_service_catalog/services/director.py
+++ b/services/catalog/src/simcore_service_catalog/services/director.py
@@ -11,7 +11,9 @@
from models_library.services_metadata_published import ServiceMetaDataPublished
from models_library.services_types import ServiceKey, ServiceVersion
from models_library.utils.json_serialization import json_dumps
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings
from starlette import status
from tenacity.asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
@@ -106,11 +108,15 @@ class DirectorApi:
SEE services/catalog/src/simcore_service_catalog/api/dependencies/director.py
"""
- def __init__(self, base_url: str, app: FastAPI):
+ def __init__(
+ self, base_url: str, app: FastAPI, tracing_settings: TracingSettings | None
+ ):
self.client = httpx.AsyncClient(
base_url=base_url,
timeout=app.state.settings.CATALOG_CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
)
+ if tracing_settings:
+ setup_httpx_client_tracing(self.client)
self.vtag = app.state.settings.CATALOG_DIRECTOR.DIRECTOR_VTAG
async def close(self):
@@ -151,15 +157,25 @@ async def get_service(
return ServiceMetaDataPublished.parse_obj(data[0])
-async def setup_director(app: FastAPI) -> None:
+async def setup_director(
+ app: FastAPI, tracing_settings: TracingSettings | None
+) -> None:
if settings := app.state.settings.CATALOG_DIRECTOR:
with log_context(
_logger, logging.DEBUG, "Setup director at %s", f"{settings.base_url=}"
):
async for attempt in AsyncRetrying(**_director_startup_retry_policy):
- client = DirectorApi(base_url=settings.base_url, app=app)
+ client = DirectorApi(
+ base_url=settings.base_url,
+ app=app,
+ tracing_settings=tracing_settings,
+ )
with attempt:
- client = DirectorApi(base_url=settings.base_url, app=app)
+ client = DirectorApi(
+ base_url=settings.base_url,
+ app=app,
+ tracing_settings=tracing_settings,
+ )
if not await client.is_responsive():
with suppress(Exception):
await client.close()
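The loop above is tenacity's async retry idiom: any exception raised inside the `with attempt:` block marks that attempt as failed and re-enters the loop until the policy is exhausted. A hedged sketch with an assumed policy (the real _director_startup_retry_policy is defined elsewhere in this module):

from tenacity import stop_after_attempt, wait_fixed
from tenacity.asyncio import AsyncRetrying

_assumed_policy = {"wait": wait_fixed(2), "stop": stop_after_attempt(5), "reraise": True}

async def wait_until_responsive(client) -> None:
    async for attempt in AsyncRetrying(**_assumed_policy):
        with attempt:
            # raising here fails the attempt and triggers the next retry
            if not await client.is_responsive():
                msg = "director not responsive yet"
                raise RuntimeError(msg)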
diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt
index 9443ee269ef..344d07b5339 100644
--- a/services/clusters-keeper/requirements/_base.txt
+++ b/services/clusters-keeper/requirements/_base.txt
@@ -63,8 +63,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -258,6 +256,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -280,6 +279,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -288,6 +288,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -314,6 +316,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -321,6 +324,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.3
# via
diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt
index e2832a14944..00a7437644c 100644
--- a/services/clusters-keeper/requirements/_test.txt
+++ b/services/clusters-keeper/requirements/_test.txt
@@ -19,10 +19,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index 6cdd686b12f..dc0ea01d6f9 100644
--- a/services/dask-sidecar/requirements/_base.txt
+++ b/services/dask-sidecar/requirements/_base.txt
@@ -46,8 +46,6 @@ arrow==1.3.0
# -r requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/_base.in
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt
index f8fe44d6058..5a9116dfe47 100644
--- a/services/datcore-adapter/requirements/_base.txt
+++ b/services/datcore-adapter/requirements/_base.txt
@@ -39,8 +39,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -166,6 +164,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -184,12 +183,15 @@ opentelemetry-instrumentation==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.47b0
@@ -208,6 +210,7 @@ opentelemetry-semantic-conventions==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -215,6 +218,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt
index 02162fe9a64..dfcfa5ab028 100644
--- a/services/director-v2/requirements/_base.txt
+++ b/services/director-v2/requirements/_base.txt
@@ -81,7 +81,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -340,6 +339,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -363,6 +363,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -377,6 +378,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -402,6 +405,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -409,6 +413,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
ordered-set==4.1.0
# via -r requirements/_base.in
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_client.py b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
index 541d90688dc..872c08f3b5f 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
@@ -12,7 +12,9 @@ class ThinDV2LocalhostClient(BaseThinClient):
def __init__(self):
super().__init__(
- total_retry_interval=10, default_http_client_timeout=Timeout(5)
+ total_retry_interval=10,
+ default_http_client_timeout=Timeout(5),
+ tracing_settings=None,
)
def _get_url(self, postfix: str) -> str:
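For context, Timeout(5) gives every phase of the request (connect, read, write, pool) the same five-second budget; httpx also accepts per-phase overrides. A short illustration with arbitrary values:

import httpx

t_uniform = httpx.Timeout(5.0)             # 5s for connect/read/write/pool alike
t_mixed = httpx.Timeout(5.0, connect=1.0)  # same, but only 1s to establish the connection
client = httpx.AsyncClient(timeout=t_mixed)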
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
index 893aed2504e..70ee252aa20 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
@@ -36,13 +36,16 @@
async def _initialized_app(only_db: bool = False) -> AsyncIterator[FastAPI]:
app = create_base_app()
settings: AppSettings = app.state.settings
-
# Initialize minimal required components for the application
db.setup(app, settings.POSTGRES)
if not only_db:
dynamic_sidecar.setup(app)
- director_v0.setup(app, settings.DIRECTOR_V0)
+ director_v0.setup(
+ app,
+ director_v0_settings=settings.DIRECTOR_V0,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
await app.router.startup()
yield app
diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py
index f1c81f18f98..6487d725143 100644
--- a/services/director-v2/src/simcore_service_director_v2/core/application.py
+++ b/services/director-v2/src/simcore_service_director_v2/core/application.py
@@ -149,19 +149,34 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:
substitutions.setup(app)
+ if settings.DIRECTOR_V2_TRACING:
+ setup_tracing(app, settings.DIRECTOR_V2_TRACING, APP_NAME)
+
if settings.DIRECTOR_V0.DIRECTOR_V0_ENABLED:
- director_v0.setup(app, settings.DIRECTOR_V0)
+ director_v0.setup(
+ app,
+ director_v0_settings=settings.DIRECTOR_V0,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
if settings.DIRECTOR_V2_STORAGE:
- storage.setup(app, settings.DIRECTOR_V2_STORAGE)
+ storage.setup(
+ app,
+ storage_settings=settings.DIRECTOR_V2_STORAGE,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
if settings.DIRECTOR_V2_CATALOG:
- catalog.setup(app, settings.DIRECTOR_V2_CATALOG)
+ catalog.setup(
+ app,
+ catalog_settings=settings.DIRECTOR_V2_CATALOG,
+ tracing_settings=settings.DIRECTOR_V2_TRACING,
+ )
db.setup(app, settings.POSTGRES)
if settings.DYNAMIC_SERVICES.DIRECTOR_V2_DYNAMIC_SERVICES_ENABLED:
- dynamic_services.setup(app)
+ dynamic_services.setup(app, tracing_settings=settings.DIRECTOR_V2_TRACING)
dynamic_scheduler_enabled = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR and (
settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
@@ -192,8 +207,6 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:
if settings.DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED:
instrumentation.setup(app)
- if settings.DIRECTOR_V2_TRACING:
- setup_tracing(app, app.state.settings.DIRECTOR_V2_TRACING, APP_NAME)
if settings.DIRECTOR_V2_PROFILING:
app.add_middleware(ProfilerMiddleware)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
index f5e378afa43..22b4eb89bd3 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
@@ -9,26 +9,37 @@
from models_library.services_resources import ServiceResourcesDict
from models_library.users import UserID
from pydantic import parse_obj_as
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings
from ..utils.client_decorators import handle_errors, handle_retry
logger = logging.getLogger(__name__)
-def setup(app: FastAPI, settings: CatalogSettings) -> None:
- if not settings:
- settings = CatalogSettings()
+def setup(
+ app: FastAPI,
+ catalog_settings: CatalogSettings | None,
+ tracing_settings: TracingSettings | None,
+) -> None:
+
+ if not catalog_settings:
+ catalog_settings = CatalogSettings()
async def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{catalog_settings.api_base_url}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
+
CatalogClient.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.api_base_url}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
)
- logger.debug("created client for catalog: %s", settings.api_base_url)
+ logger.debug("created client for catalog: %s", catalog_settings.api_base_url)
# Here we currently do not ensure the catalog is up on start
# This will need to be assessed.
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
index 0bc8c799dcb..3229ddc642a 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
@@ -1,7 +1,4 @@
-""" Module that takes care of communications with director v0 service
-
-
-"""
+"""Module that takes care of communications with director v0 service"""
import logging
import urllib.parse
@@ -20,7 +17,9 @@
from models_library.service_settings_labels import SimcoreServiceLabels
from models_library.services import ServiceKey, ServiceKeyVersion, ServiceVersion
from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_decorator
+from settings_library.tracing import TracingSettings
from ..core.settings import DirectorV0Settings
from ..utils.client_decorators import handle_errors, handle_retry
@@ -31,25 +30,34 @@
# Module's setup logic ---------------------------------------------
-def setup(app: FastAPI, settings: DirectorV0Settings | None):
- if not settings:
- settings = DirectorV0Settings()
+def setup(
+ app: FastAPI,
+ director_v0_settings: DirectorV0Settings | None,
+ tracing_settings: TracingSettings | None,
+):
+ if not director_v0_settings:
+ director_v0_settings = DirectorV0Settings()
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{director_v0_settings.endpoint}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
DirectorV0Client.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.endpoint}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
+ )
+ logger.debug(
+ "created client for director-v0: %s", director_v0_settings.endpoint
)
- logger.debug("created client for director-v0: %s", settings.endpoint)
async def on_shutdown() -> None:
client = DirectorV0Client.instance(app).client
await client.aclose()
del client
- logger.debug("delete client for director-v0: %s", settings.endpoint)
+ logger.debug("delete client for director-v0: %s", director_v0_settings.endpoint)
app.add_event_handler("startup", on_startup)
app.add_event_handler("shutdown", on_shutdown)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
index d572a9f23fb..acbc08849a6 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
@@ -8,19 +8,24 @@
import httpx
from fastapi import FastAPI
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings
from ..utils.client_decorators import handle_errors, handle_retry
logger = logging.getLogger(__name__)
-def setup(app: FastAPI) -> None:
+def setup(app: FastAPI, tracing_settings: TracingSettings | None) -> None:
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
ServicesClient.create(
app,
- client=httpx.AsyncClient(
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
- ),
+ client=client,
)
async def on_shutdown() -> None:
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
index 241f32fe70e..feba415ecd0 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
@@ -12,6 +12,7 @@
expect_status,
retry_on_errors,
)
+from settings_library.tracing import TracingSettings
from ....core.dynamic_services_settings.scheduler import (
DynamicServicesSchedulerSettings,
@@ -31,6 +32,9 @@ def __init__(self, app: FastAPI):
scheduler_settings: DynamicServicesSchedulerSettings = (
app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
)
+ tracing_settings: TracingSettings | None = (
+ app.state.settings.DIRECTOR_V2_TRACING
+ )
# timeouts
self._health_request_timeout = Timeout(1.0, connect=1.0)
@@ -53,6 +57,7 @@ def __init__(self, app: FastAPI):
scheduler_settings.DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT,
connect=scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT,
),
+ tracing_settings=tracing_settings,
)
def _get_url(
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
index 2c546ea3d84..4eaf3ba2016 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
@@ -24,6 +24,7 @@
from models_library.services import ServiceKey, ServiceVersion
from models_library.wallets import WalletID
from pydantic import parse_obj_as
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.errors import PricingPlanUnitNotFoundError
from ..core.settings import AppSettings
@@ -41,6 +42,8 @@ def create(cls, settings: AppSettings) -> "ResourceUsageTrackerClient":
client = httpx.AsyncClient(
base_url=settings.DIRECTOR_V2_RESOURCE_USAGE_TRACKER.api_base_url,
)
+ if settings.DIRECTOR_V2_TRACING:
+ setup_httpx_client_tracing(client=client)
exit_stack = contextlib.AsyncExitStack()
return cls(client=client, exit_stack=exit_stack)
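The AsyncExitStack here collects async cleanups so the client can be torn down later with a single call. A hedged sketch of the idiom (names are illustrative):

import contextlib
import httpx

async def open_client() -> tuple[httpx.AsyncClient, contextlib.AsyncExitStack]:
    exit_stack = contextlib.AsyncExitStack()
    # entering the client's async context registers its aclose() on the stack
    client = await exit_stack.enter_async_context(httpx.AsyncClient())
    return client, exit_stack  # teardown: await exit_stack.aclose()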
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
index 98e18845333..c3e9cd21576 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
@@ -8,9 +8,11 @@
import httpx
from fastapi import FastAPI, HTTPException
from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from servicelib.logging_utils import log_decorator
from settings_library.s3 import S3Settings
from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings
# Module's business logic ---------------------------------------------
from starlette import status
@@ -23,19 +25,27 @@
# Module's setup logic ---------------------------------------------
-def setup(app: FastAPI, settings: StorageSettings):
- if not settings:
- settings = StorageSettings()
+def setup(
+ app: FastAPI,
+ storage_settings: StorageSettings | None,
+ tracing_settings: TracingSettings | None,
+):
+
+ if not storage_settings:
+ storage_settings = StorageSettings()
def on_startup() -> None:
+ client = httpx.AsyncClient(
+ base_url=f"{storage_settings.api_base_url}",
+ timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+ )
+ if tracing_settings:
+ setup_httpx_client_tracing(client=client)
StorageClient.create(
app,
- client=httpx.AsyncClient(
- base_url=f"{settings.api_base_url}",
- timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
- ),
+ client=client,
)
- logger.debug("created client for storage: %s", settings.api_base_url)
+ logger.debug("created client for storage: %s", storage_settings.api_base_url)
async def on_shutdown() -> None:
client = StorageClient.instance(app).client
diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
index 720e7d0c3e1..ec955f1e167 100644
--- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
+++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
@@ -64,6 +64,7 @@
from settings_library.rabbit import RabbitSettings
from settings_library.redis import RedisSettings
from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings
from simcore_postgres_database.models.comp_pipeline import comp_pipeline
from simcore_postgres_database.models.comp_tasks import comp_tasks
from simcore_postgres_database.models.projects_networks import projects_networks
@@ -340,8 +341,14 @@ async def patch_storage_setup(
original_setup = dv2_modules_storage.setup
- def setup(app: FastAPI, settings: StorageSettings) -> None:
- original_setup(app, local_settings)
+ def setup(
+ app: FastAPI,
+ storage_settings: StorageSettings,
+ tracing_settings: TracingSettings | None,
+ ) -> None:
+ original_setup(
+ app, storage_settings=local_settings, tracing_settings=tracing_settings
+ )
mocker.patch("simcore_service_director_v2.modules.storage.setup", side_effect=setup)
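The fixture wraps the real setup so the mock keeps a call-compatible signature with the new keyword arguments while injecting test settings. The same technique in isolation, as a hedged sketch (module path taken from this patch; local_settings assumed to be a fixture):

def test_setup_is_wrapped(mocker, local_settings):
    import simcore_service_director_v2.modules.storage as storage_module

    original_setup = storage_module.setup

    def _setup(app, storage_settings, tracing_settings) -> None:
        # swap in the test settings, forward the tracing argument untouched
        original_setup(
            app, storage_settings=local_settings, tracing_settings=tracing_settings
        )

    mocker.patch(
        "simcore_service_director_v2.modules.storage.setup", side_effect=_setup
    )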
diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt
index cb2cc603fb0..3462f0ba65b 100644
--- a/services/dynamic-scheduler/requirements/_base.txt
+++ b/services/dynamic-scheduler/requirements/_base.txt
@@ -40,9 +40,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -172,6 +170,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -191,6 +190,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -199,6 +199,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -218,6 +220,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -225,6 +228,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
index e823216576b..68aae3b97f3 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
@@ -37,6 +37,7 @@ def __init__(self, app: FastAPI) -> None:
DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S
),
extra_allowed_method_names={"attach_lifespan_to"},
+ tracing_settings=settings.DYNAMIC_SCHEDULER_TRACING,
)
@retry_on_errors()
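tracing_settings is threaded through the BaseThinClient constructor here. The real base class lives in servicelib; a hedged sketch of what it plausibly does with the argument, with the signature inferred from the call sites in this patch:

import httpx
from servicelib.fastapi.tracing import setup_httpx_client_tracing

class ThinClientSketch:
    def __init__(
        self,
        *,
        total_retry_interval: float,
        default_http_client_timeout: httpx.Timeout,
        tracing_settings=None,
        **_kwargs,
    ) -> None:
        self.total_retry_interval = total_retry_interval
        self.client = httpx.AsyncClient(timeout=default_http_client_timeout)
        if tracing_settings:  # same opt-in rule as the rest of this patch
            setup_httpx_client_tracing(self.client)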
diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt
index 40c32b696ec..559440b03f0 100644
--- a/services/dynamic-sidecar/requirements/_base.txt
+++ b/services/dynamic-sidecar/requirements/_base.txt
@@ -76,7 +76,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -243,6 +242,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -266,6 +266,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-aiopg==0.48b0
@@ -280,6 +281,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
# via opentelemetry-instrumentation-aiopg
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -305,6 +308,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -312,6 +316,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt
index 26a626f01db..8e46a857186 100644
--- a/services/efs-guardian/requirements/_base.txt
+++ b/services/efs-guardian/requirements/_base.txt
@@ -69,9 +69,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==24.2.0
@@ -238,6 +236,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -261,6 +260,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -271,6 +271,8 @@ opentelemetry-instrumentation-botocore==0.48b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -298,6 +300,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -305,6 +308,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.7
# via
diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt
index 8bdc1ec8ebf..f188e8071de 100644
--- a/services/efs-guardian/requirements/_test.txt
+++ b/services/efs-guardian/requirements/_test.txt
@@ -23,10 +23,6 @@ anyio==4.6.2.post1
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==24.2.0
# via
# -c requirements/_base.txt
diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt
index c6e253b5e6a..732bac0872f 100644
--- a/services/invitations/requirements/_base.txt
+++ b/services/invitations/requirements/_base.txt
@@ -35,8 +35,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/_base.in
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
- # via redis
attrs==23.2.0
# via
# aiohttp
@@ -153,6 +151,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -171,12 +170,15 @@ opentelemetry-instrumentation==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.47b0
@@ -195,6 +197,7 @@ opentelemetry-semantic-conventions==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -202,6 +205,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt
index 88aae6375d4..c38b7880c1d 100644
--- a/services/payments/requirements/_base.txt
+++ b/services/payments/requirements/_base.txt
@@ -43,9 +43,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -201,6 +199,7 @@ opentelemetry-api==1.27.0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -220,6 +219,7 @@ opentelemetry-instrumentation==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.48b0
@@ -228,6 +228,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
# via -r requirements/../../../packages/postgres-database/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.48b0
# via -r requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
@@ -247,6 +249,7 @@ opentelemetry-semantic-conventions==0.48b0
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -254,6 +257,7 @@ opentelemetry-util-http==0.48b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.6
# via
diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py
index 0b1097492c6..44c54b6108d 100644
--- a/services/payments/src/simcore_service_payments/services/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py
@@ -25,6 +25,7 @@
HealthMixinMixin,
)
from servicelib.fastapi.httpx_utils import to_curl_command
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from simcore_service_payments.models.schemas.acknowledgements import (
AckPaymentWithPaymentMethod,
)
@@ -216,5 +217,7 @@ def setup_payments_gateway(app: FastAPI):
secret=settings.PAYMENTS_GATEWAY_API_SECRET.get_secret_value()
),
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.attach_lifespan_to(app)
api.set_to_app_state(app)
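Here the client already exists on the api object, so instrumentation is applied after construction. Under the hood, a helper like setup_httpx_client_tracing plausibly delegates to OpenTelemetry's httpx instrumentor (now pinned in the requirements above); a hedged sketch of that standard API for instrumenting a single client directly:

import httpx
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor

client = httpx.AsyncClient(base_url="https://gateway.example")  # placeholder URL
HTTPXClientInstrumentor().instrument_client(client)  # emits a span per outgoing request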
diff --git a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
index e66f650fe7b..3f114540f99 100644
--- a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
+++ b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
@@ -25,6 +25,7 @@
BaseHTTPApi,
HealthMixinMixin,
)
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.settings import ApplicationSettings
@@ -73,5 +74,7 @@ def setup_resource_usage_tracker(app: FastAPI):
api = ResourceUsageTrackerApi.from_client_kwargs(
base_url=settings.PAYMENTS_RESOURCE_USAGE_TRACKER.base_url,
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.set_to_app_state(app)
api.attach_lifespan_to(app)
diff --git a/services/payments/src/simcore_service_payments/services/stripe.py b/services/payments/src/simcore_service_payments/services/stripe.py
index 38cc21fab0e..3f3fa933bb6 100644
--- a/services/payments/src/simcore_service_payments/services/stripe.py
+++ b/services/payments/src/simcore_service_payments/services/stripe.py
@@ -19,6 +19,7 @@
BaseHTTPApi,
HealthMixinMixin,
)
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
from ..core.errors import StripeRuntimeError
from ..core.settings import ApplicationSettings
@@ -91,6 +92,8 @@ def setup_stripe(app: FastAPI):
base_url=settings.PAYMENTS_STRIPE_URL,
auth=_StripeBearerAuth(settings.PAYMENTS_STRIPE_API_SECRET.get_secret_value()),
)
+ if settings.PAYMENTS_TRACING:
+ setup_httpx_client_tracing(api.client)
api.set_to_app_state(app)
api.attach_lifespan_to(app)
diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt
index 97a3bd129b7..bbd3cddf53d 100644
--- a/services/resource-usage-tracker/requirements/_base.txt
+++ b/services/resource-usage-tracker/requirements/_base.txt
@@ -69,9 +69,7 @@ arrow==1.3.0
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
async-timeout==4.0.3
- # via
- # asyncpg
- # redis
+ # via asyncpg
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
@@ -260,6 +258,7 @@ opentelemetry-api==1.26.0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-propagator-aws-xray
@@ -283,6 +282,7 @@ opentelemetry-instrumentation==0.47b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
opentelemetry-instrumentation-asgi==0.47b0
@@ -293,6 +293,8 @@ opentelemetry-instrumentation-botocore==0.47b0
# via -r requirements/../../../packages/aws-library/requirements/_base.in
opentelemetry-instrumentation-fastapi==0.47b0
# via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+ # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
opentelemetry-instrumentation-redis==0.47b0
# via
# -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -320,6 +322,7 @@ opentelemetry-semantic-conventions==0.47b0
# opentelemetry-instrumentation-asyncpg
# opentelemetry-instrumentation-botocore
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
@@ -327,6 +330,7 @@ opentelemetry-util-http==0.47b0
# via
# opentelemetry-instrumentation-asgi
# opentelemetry-instrumentation-fastapi
+ # opentelemetry-instrumentation-httpx
# opentelemetry-instrumentation-requests
orjson==3.10.0
# via
diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt
index e70753feb19..4db08363ded 100644
--- a/services/resource-usage-tracker/requirements/_test.txt
+++ b/services/resource-usage-tracker/requirements/_test.txt
@@ -10,10 +10,6 @@ anyio==4.3.0
# httpx
asgi-lifespan==2.1.0
# via -r requirements/_test.in
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt
index edadd851b65..c73f10b2ef0 100644
--- a/services/storage/requirements/_base.txt
+++ b/services/storage/requirements/_base.txt
@@ -78,7 +78,6 @@ async-timeout==4.0.3
# via
# aiopg
# asyncpg
- # redis
asyncpg==0.29.0
# via sqlalchemy
attrs==23.2.0
diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt
index 1e33824a7c0..f0132fe4c7c 100644
--- a/services/storage/requirements/_test.txt
+++ b/services/storage/requirements/_test.txt
@@ -13,10 +13,6 @@ aiosignal==1.3.1
# aiohttp
antlr4-python3-runtime==4.13.2
# via moto
-async-timeout==4.0.3
- # via
- # -c requirements/_base.txt
- # redis
attrs==23.2.0
# via
# -c requirements/_base.txt
diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt
index 5b42c95fffd..01c8859912d 100644
--- a/services/web/server/requirements/_base.txt
+++ b/services/web/server/requirements/_base.txt
@@ -89,7 +89,6 @@ async-timeout==4.0.3
# via
# aiohttp
# aiopg
- # redis
asyncpg==0.27.0
# via
# -r requirements/_base.in
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index 67fcd247fda..3aab7cde47d 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -18,7 +18,6 @@ async-timeout==4.0.3
# via
# -c requirements/_base.txt
# aiohttp
- # redis
asyncpg==0.27.0
# via
# -c requirements/_base.txt
diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt
index 2f4dc983011..dad3c42339d 100644
--- a/tests/swarm-deploy/requirements/_test.txt
+++ b/tests/swarm-deploy/requirements/_test.txt
@@ -197,6 +197,10 @@ opentelemetry-api==1.27.0
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-instrumentation
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
# opentelemetry-semantic-conventions
@@ -213,7 +217,22 @@ opentelemetry-exporter-otlp-proto-grpc==1.27.0
opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.48b0
- # via opentelemetry-instrumentation-requests
+ # via
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
+ # opentelemetry-instrumentation-requests
+opentelemetry-instrumentation-aiopg==0.48b0
+ # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in
+opentelemetry-instrumentation-asyncpg==0.48b0
+ # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in
+opentelemetry-instrumentation-dbapi==0.48b0
+ # via opentelemetry-instrumentation-aiopg
+opentelemetry-instrumentation-redis==0.48b0
+ # via
+ # -r requirements/../../../packages/service-library/requirements/_base.in
+ # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in
opentelemetry-instrumentation-requests==0.48b0
# via
# -r requirements/../../../packages/service-library/requirements/_base.in
@@ -231,6 +250,9 @@ opentelemetry-sdk==1.27.0
# opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.48b0
# via
+ # opentelemetry-instrumentation-asyncpg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
# opentelemetry-instrumentation-requests
# opentelemetry-sdk
opentelemetry-util-http==0.48b0
@@ -494,6 +516,9 @@ wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
+ # opentelemetry-instrumentation-aiopg
+ # opentelemetry-instrumentation-dbapi
+ # opentelemetry-instrumentation-redis
yarl==1.12.1
# via
# -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in
From 17f486e3848c3943bb0b5f7f2336aa19a2dbcee3 Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Thu, 14 Nov 2024 11:16:18 +0100
Subject: [PATCH 16/17] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20UX:=20Create?=
=?UTF-8?q?=20Study=20from=20Template=20(#6706)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../osparc/dashboard/ResourceBrowserBase.js | 4 +-
.../class/osparc/dashboard/StudyBrowser.js | 2 +-
.../class/osparc/dashboard/TemplateBrowser.js | 98 ++++++++---
.../class/osparc/node/TierSelectionView.js | 2 +-
.../class/osparc/study/NodePricingUnits.js | 76 ++++++---
.../source/class/osparc/study/StudyOptions.js | 152 +++++++++++-------
.../class/osparc/study/StudyPricingUnits.js | 34 +++-
.../client/source/class/osparc/study/Utils.js | 2 +-
8 files changed, 259 insertions(+), 111 deletions(-)
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
index a80672bd3cd..344507aad9a 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js
@@ -95,7 +95,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
return isLogged;
},
- startStudyById: function(studyId, openCB, cancelCB, isStudyCreation = false) {
+ startStudyById: function(studyId, openCB, cancelCB, showStudyOptions = false) {
if (!osparc.dashboard.ResourceBrowserBase.checkLoggedIn()) {
return;
}
@@ -117,7 +117,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", {
osparc.data.Resources.fetch("studies", "getWallet", params)
.then(wallet => {
if (
- isStudyCreation ||
+ showStudyOptions ||
wallet === null ||
osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null
) {
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
index ceaee03b3ac..a2de2032524 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -1229,7 +1229,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
folderId: this.getCurrentFolderId(),
};
osparc.study.Utils.createStudyFromTemplate(templateCopyData, this._loadingPage, contextProps)
- .then(studyId => this.__startStudyAfterCreating(studyId))
+ .then(studyData => this.__startStudyAfterCreating(studyData["uuid"]))
.catch(err => {
this._hideLoadingPage();
osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
index d597d8a438c..7f4f0362cab 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js
@@ -137,27 +137,85 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", {
return;
}
- this._showLoadingPage(this.tr("Creating ") + (templateData.name || osparc.product.Utils.getStudyAlias({firstUpperCase: true})));
- osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage)
- .then(studyId => {
- const openCB = () => this._hideLoadingPage();
- const cancelCB = () => {
- this._hideLoadingPage();
- const params = {
- url: {
- studyId
- }
+ const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true});
+ this._showLoadingPage(this.tr("Creating ") + (templateData.name || studyAlias));
+
+ const studyOptions = new osparc.study.StudyOptions();
+ // they will be patched once the study is created
+ studyOptions.setPatchStudy(false);
+ studyOptions.setStudyData(templateData);
+ const win = osparc.study.StudyOptions.popUpInWindow(studyOptions);
+ win.moveItUp();
+ const cancelStudyOptions = () => {
+ this._hideLoadingPage();
+ win.close();
+ };
+ win.addListener("cancel", () => cancelStudyOptions());
+ studyOptions.addListener("cancel", () => cancelStudyOptions());
+ studyOptions.addListener("startStudy", () => {
+ const newName = studyOptions.getChildControl("title-field").getValue();
+ const walletSelection = studyOptions.getChildControl("wallet-selector").getSelection();
+ const nodesPricingUnits = studyOptions.getChildControl("study-pricing-units").getNodePricingUnits();
+ win.close();
+ this._showLoadingPage(this.tr("Creating ") + (newName || studyAlias));
+ osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage)
+ .then(newStudyData => {
+ const studyId = newStudyData["uuid"];
+ const openCB = () => {
+ this._hideLoadingPage();
};
- osparc.data.Resources.fetch("studies", "delete", params);
- };
- const isStudyCreation = true;
- this._startStudyById(studyId, openCB, cancelCB, isStudyCreation);
- })
- .catch(err => {
- this._hideLoadingPage();
- osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
- console.error(err);
- });
+ const cancelCB = () => {
+ this._hideLoadingPage();
+ const params = {
+ url: {
+ studyId
+ }
+ };
+ osparc.data.Resources.fetch("studies", "delete", params);
+ };
+
+ const promises = [];
+ // patch the name
+ if (newStudyData["name"] !== newName) {
+ promises.push(osparc.study.StudyOptions.updateName(newStudyData, newName));
+ }
+ // patch the wallet
+ if (walletSelection.length && walletSelection[0]["walletId"]) {
+ const walletId = walletSelection[0]["walletId"];
+ promises.push(osparc.study.StudyOptions.updateWallet(newStudyData["uuid"], walletId));
+ }
+ // patch the pricing units
+ // the nodeIds come from the original template; they need to be mapped to the new study's workbench
+ const workbench = newStudyData["workbench"];
+ const nodesIdsListed = [];
+ Object.keys(workbench).forEach(nodeId => {
+ const node = workbench[nodeId];
+ if (osparc.study.StudyPricingUnits.includeInList(node)) {
+ nodesIdsListed.push(nodeId);
+ }
+ });
+ nodesPricingUnits.forEach((nodePricingUnits, idx) => {
+ const selectedPricingUnitId = nodePricingUnits.getPricingUnits().getSelectedUnitId();
+ if (selectedPricingUnitId) {
+ const nodeId = nodesIdsListed[idx];
+ const pricingPlanId = nodePricingUnits.getPricingPlanId();
+ promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId));
+ }
+ });
+
+ Promise.all(promises)
+ .then(() => {
+ win.close();
+ const showStudyOptions = false;
+ this._startStudyById(studyId, openCB, cancelCB, showStudyOptions);
+ });
+ })
+ .catch(err => {
+ this._hideLoadingPage();
+ osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR");
+ console.error(err);
+ });
+ });
},
// LAYOUT //
diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
index 34dfc397b37..ffa1431a00e 100644
--- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
+++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js
@@ -105,7 +105,7 @@ qx.Class.define("osparc.node.TierSelectionView", {
if (selection.length) {
tierBox.setEnabled(false);
const selectedUnitId = selection[0].getModel();
- osparc.study.NodePricingUnits.pricingUnitSelected(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId)
+ osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId)
.finally(() => {
tierBox.setEnabled(true);
showSelectedTier(selectedUnitId);
diff --git a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
index d8caa28b68f..76918e12b3e 100644
--- a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
+++ b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js
@@ -30,8 +30,10 @@ qx.Class.define("osparc.study.NodePricingUnits", {
layout: new qx.ui.layout.VBox()
});
- this.__studyId = studyId;
- this.__nodeId = nodeId;
+ this.set({
+ studyId,
+ nodeId,
+ });
if (node instanceof osparc.data.model.Node) {
this.__nodeKey = node.getKey();
this.__nodeVersion = node.getVersion();
@@ -43,8 +45,35 @@ qx.Class.define("osparc.study.NodePricingUnits", {
}
},
+ properties: {
+ studyId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ },
+
+ nodeId: {
+ check: "String",
+ init: null,
+ nullable: false,
+ },
+
+ pricingPlanId: {
+ check: "Number",
+ init: null,
+ nullable: false,
+ },
+
+ patchNode: {
+ check: "Boolean",
+ init: true,
+ nullable: false,
+ event: "changePatchNode",
+ },
+ },
+
statics: {
- pricingUnitSelected: function(studyId, nodeId, planId, selectedUnitId) {
+ patchPricingUnitSelection: function(studyId, nodeId, planId, selectedUnitId) {
const params = {
url: {
studyId,
@@ -58,19 +87,18 @@ qx.Class.define("osparc.study.NodePricingUnits", {
},
members: {
- __studyId: null,
- __nodeId: null,
__nodeKey: null,
__nodeVersion: null,
__nodeLabel: null,
+ __pricingUnits: null,
showPricingUnits: function(inGroupBox = true) {
return new Promise(resolve => {
const nodeKey = this.__nodeKey;
const nodeVersion = this.__nodeVersion;
const nodeLabel = this.__nodeLabel;
- const studyId = this.__studyId;
- const nodeId = this.__nodeId;
+ const studyId = this.getStudyId();
+ const nodeId = this.getNodeId();
const plansParams = {
url: osparc.data.Resources.getServiceUrl(
@@ -79,30 +107,36 @@ qx.Class.define("osparc.study.NodePricingUnits", {
)
};
osparc.data.Resources.fetch("services", "pricingPlans", plansParams)
- .then(pricingPlans => {
- if (pricingPlans) {
+ .then(pricingPlan => {
+ if (pricingPlan) {
const unitParams = {
url: {
studyId,
nodeId
}
};
+ this.set({
+ pricingPlanId: pricingPlan["pricingPlanId"]
+ });
osparc.data.Resources.fetch("studies", "getPricingUnit", unitParams)
.then(preselectedPricingUnit => {
- if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) {
- const unitButtons = new osparc.study.PricingUnits(pricingPlans["pricingUnits"], preselectedPricingUnit);
+ if (pricingPlan && "pricingUnits" in pricingPlan && pricingPlan["pricingUnits"].length) {
+ const pricingUnitButtons = this.__pricingUnits = new osparc.study.PricingUnits(pricingPlan["pricingUnits"], preselectedPricingUnit);
if (inGroupBox) {
const pricingUnitsLayout = osparc.study.StudyOptions.createGroupBox(nodeLabel);
- pricingUnitsLayout.add(unitButtons);
+ pricingUnitsLayout.add(pricingUnitButtons);
this._add(pricingUnitsLayout);
} else {
- this._add(unitButtons);
+ this._add(pricingUnitButtons);
}
- unitButtons.addListener("changeSelectedUnitId", e => {
- unitButtons.setEnabled(false);
- const selectedPricingUnitId = e.getData();
- this.self().pricingUnitSelected(this.__studyId, this.__nodeId, pricingPlans["pricingPlanId"], selectedPricingUnitId)
- .finally(() => unitButtons.setEnabled(true));
+ pricingUnitButtons.addListener("changeSelectedUnitId", e => {
+ if (this.isPatchNode()) {
+ pricingUnitButtons.setEnabled(false);
+ const pricingPlanId = this.getPricingPlanId();
+ const selectedPricingUnitId = e.getData();
+ this.self().patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)
+ .finally(() => pricingUnitButtons.setEnabled(true));
+ }
});
}
})
@@ -110,6 +144,10 @@ qx.Class.define("osparc.study.NodePricingUnits", {
}
});
});
- }
+ },
+
+ getPricingUnits: function() {
+ return this.__pricingUnits;
+ },
}
});
diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
index 9922ec017e3..5b0fd30cadb 100644
--- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
+++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js
@@ -22,8 +22,11 @@ qx.Class.define("osparc.study.StudyOptions", {
this.base(arguments);
this._setLayout(new qx.ui.layout.VBox(15));
+ this.__buildLayout();
- this.setStudyId(studyId);
+ if (studyId) {
+ this.setStudyId(studyId);
+ }
},
properties: {
@@ -40,7 +43,14 @@ qx.Class.define("osparc.study.StudyOptions", {
nullable: true,
event: "changeWallet",
apply: "__applyWallet"
- }
+ },
+
+ patchStudy: {
+ check: "Boolean",
+ init: true,
+ nullable: false,
+ event: "changePatchStudy",
+ },
},
events: {
@@ -78,7 +88,31 @@ qx.Class.define("osparc.study.StudyOptions", {
});
box.setLayout(new qx.ui.layout.VBox(5));
return box;
- }
+ },
+
+ updateName: function(studyData, name) {
+ return osparc.info.StudyUtils.patchStudyData(studyData, "name", name)
+ .catch(err => {
+ console.error(err);
+          const msg = err.message || qx.locale.Manager.tr("Something went wrong while renaming");
+ osparc.FlashMessenger.logAs(msg, "ERROR");
+ });
+ },
+
+ updateWallet: function(studyId, walletId) {
+ const params = {
+ url: {
+ studyId,
+ walletId,
+ }
+ };
+ return osparc.data.Resources.fetch("studies", "selectWallet", params)
+ .catch(err => {
+ console.error(err);
+ const msg = err.message || qx.locale.Manager.tr("Error selecting Credit Account");
+ osparc.FlashMessenger.getInstance().logAs(msg, "ERROR");
+ });
+ },
},
members: {
@@ -147,6 +181,27 @@ qx.Class.define("osparc.study.StudyOptions", {
control = this.self().createGroupBox(this.tr("Tiers"));
this.getChildControl("options-layout").add(control);
break;
+ case "study-pricing-units": {
+ control = new osparc.study.StudyPricingUnits();
+ const loadingImage = this.getChildControl("loading-units-spinner");
+ const unitsBoxesLayout = this.getChildControl("services-resources-layout");
+ const unitsLoading = () => {
+ loadingImage.show();
+ unitsBoxesLayout.exclude();
+ };
+ const unitsReady = () => {
+ loadingImage.exclude();
+ unitsBoxesLayout.show();
+ control.getNodePricingUnits().forEach(nodePricingUnits => {
+ this.bind("patchStudy", nodePricingUnits, "patchNode");
+ });
+ };
+ unitsLoading();
+ control.addListener("loadingUnits", () => unitsLoading());
+ control.addListener("unitsReady", () => unitsReady());
+ unitsBoxesLayout.add(control);
+ break;
+ }
case "buttons-layout":
control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({
alignX: "right"
@@ -192,7 +247,7 @@ qx.Class.define("osparc.study.StudyOptions", {
])
.then(values => {
const studyData = values[0];
- this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
+ this.setStudyData(studyData);
if (values[1] && "walletId" in values[1]) {
this.__studyWalletId = values[1]["walletId"];
@@ -201,6 +256,16 @@ qx.Class.define("osparc.study.StudyOptions", {
});
},
+ setStudyData: function(studyData) {
+ this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData);
+
+ const titleField = this.getChildControl("title-field");
+ titleField.setValue(this.__studyData["name"]);
+
+ const studyPricingUnits = this.getChildControl("study-pricing-units");
+ studyPricingUnits.setStudyData(this.__studyData);
+ },
+
__applyWallet: function(wallet) {
if (wallet) {
const walletSelector = this.getChildControl("wallet-selector");
@@ -224,9 +289,6 @@ qx.Class.define("osparc.study.StudyOptions", {
const store = osparc.store.Store.getInstance();
const titleField = this.getChildControl("title-field");
- if (this.__studyData) {
- titleField.setValue(this.__studyData["name"]);
- }
titleField.addListener("appear", () => {
titleField.focus();
titleField.activate();
@@ -261,21 +323,7 @@ qx.Class.define("osparc.study.StudyOptions", {
},
__buildOptionsLayout: function() {
- const loadingImage = this.getChildControl("loading-units-spinner");
- const unitsBoxesLayout = this.getChildControl("services-resources-layout");
- const unitsLoading = () => {
- loadingImage.show();
- unitsBoxesLayout.exclude();
- };
- const unitsReady = () => {
- loadingImage.exclude();
- unitsBoxesLayout.show();
- };
- unitsLoading();
- const studyPricingUnits = new osparc.study.StudyPricingUnits(this.__studyData);
- studyPricingUnits.addListener("loadingUnits", () => unitsLoading());
- studyPricingUnits.addListener("unitsReady", () => unitsReady());
- unitsBoxesLayout.add(studyPricingUnits);
+ this.getChildControl("study-pricing-units");
},
__buildButtons: function() {
@@ -291,48 +339,34 @@ qx.Class.define("osparc.study.StudyOptions", {
const openButton = this.getChildControl("open-button");
openButton.setFetching(true);
- // first, update the name if necessary
- const titleSelection = this.getChildControl("title-field").getValue();
- if (this.__studyData && this.__studyData["name"] !== titleSelection) {
- await this.__updateName(this.__studyData, titleSelection);
- }
+ if (this.isPatchStudy()) {
+ // first, update the name if necessary
+ const titleSelection = this.getChildControl("title-field").getValue();
+ if (this.__studyData["name"] !== titleSelection) {
+ await this.self().updateName(this.__studyData, titleSelection);
+ }
- // second, update the wallet if necessary
- const store = osparc.store.Store.getInstance();
- const walletSelection = this.getChildControl("wallet-selector").getSelection();
- const studyId = this.getStudyId();
- if (studyId && walletSelection.length && walletSelection[0]["walletId"]) {
- const params = {
- url: {
- studyId,
- "walletId": walletSelection[0]["walletId"]
- }
- };
- osparc.data.Resources.fetch("studies", "selectWallet", params)
- .then(() => {
- store.setActiveWallet(this.getWallet());
- this.fireEvent("startStudy");
- })
- .catch(err => {
- console.error(err);
- const msg = err.message || this.tr("Error selecting Credit Account");
- osparc.FlashMessenger.getInstance().logAs(msg, "ERROR");
- })
- .finally(() => openButton.setFetching(false));
+ // second, update the wallet if necessary
+ const store = osparc.store.Store.getInstance();
+ const walletSelection = this.getChildControl("wallet-selector").getSelection();
+ if (walletSelection.length && walletSelection[0]["walletId"]) {
+ const studyId = this.getStudyId();
+ const walletId = walletSelection[0]["walletId"];
+ this.self().updateWallet(studyId, walletId)
+ .then(() => {
+ store.setActiveWallet(this.getWallet());
+ this.fireEvent("startStudy");
+ })
+ .finally(() => openButton.setFetching(false));
+ } else {
+ store.setActiveWallet(this.getWallet());
+ this.fireEvent("startStudy");
+ openButton.setFetching(false);
+ }
} else {
- store.setActiveWallet(this.getWallet());
this.fireEvent("startStudy");
openButton.setFetching(false);
}
},
-
- __updateName: function(studyData, name) {
- return osparc.info.StudyUtils.patchStudyData(studyData, "name", name)
- .catch(err => {
- console.error(err);
- const msg = this.tr("Something went wrong Renaming");
- osparc.FlashMessenger.logAs(msg, "ERROR");
- });
- }
}
});
diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
index 793fee5cb34..e3e8514fbaf 100644
--- a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
+++ b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js
@@ -25,9 +25,11 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
layout: new qx.ui.layout.VBox(5)
});
- this.__studyData = studyData;
+ this.__nodePricingUnits = [];
- this.__showPricingUnits();
+ if (studyData) {
+ this.setStudyData(studyData);
+ }
},
events: {
@@ -35,8 +37,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
"unitsReady": "qx.event.type.Event"
},
+ statics: {
+ includeInList: function(node) {
+ return !osparc.data.model.Node.isFrontend(node);
+ },
+ },
+
members: {
__studyData: null,
+ __nodePricingUnits: null,
+
+ setStudyData: function(studyData) {
+ this.__studyData = studyData;
+ this.__showPricingUnits();
+ },
__showPricingUnits: function() {
const unitsLoading = () => this.fireEvent("loadingUnits");
@@ -48,16 +62,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
const workbench = this.__studyData["workbench"];
Object.keys(workbench).forEach(nodeId => {
const node = workbench[nodeId];
- if (osparc.data.model.Node.isFrontend(node)) {
- return;
+ if (this.self().includeInList(node)) {
+ const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node);
+ this.__nodePricingUnits.push(nodePricingUnits);
+ this._add(nodePricingUnits);
+ promises.push(nodePricingUnits.showPricingUnits());
}
- const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node);
- this._add(nodePricingUnits);
- promises.push(nodePricingUnits.showPricingUnits());
});
}
Promise.all(promises)
.then(() => unitsAdded());
- }
+ },
+
+ getNodePricingUnits: function() {
+ return this.__nodePricingUnits;
+ },
}
});
diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js
index 0240d263e47..66ed40201f4 100644
--- a/services/static-webserver/client/source/class/osparc/study/Utils.js
+++ b/services/static-webserver/client/source/class/osparc/study/Utils.js
@@ -255,7 +255,7 @@ qx.Class.define("osparc.study.Utils", {
}, this);
task.addListener("resultReceived", e => {
const studyData = e.getData();
- resolve(studyData["uuid"]);
+ resolve(studyData);
}, this);
task.addListener("pollingError", e => {
const err = e.getData();
From 0781e6363311f069ff5931041a4a9172f93f6c47 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Thu, 14 Nov 2024 12:16:35 +0100
Subject: [PATCH 17/17] =?UTF-8?q?=F0=9F=8E=A8Computation=20backend:=20refa?=
=?UTF-8?q?ctor=20director-v2=20internal=20computational=20scheduler=20to?=
=?UTF-8?q?=20be=20less=20resource=20heavy=20(#6696)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../src/servicelib/redis_utils.py | 36 ++-
.../service-library/tests/test_redis_utils.py | 127 ++++++--
.../api/dependencies/scheduler.py | 4 +-
.../modules/comp_scheduler/__init__.py | 34 +-
.../modules/comp_scheduler/_base_scheduler.py | 291 +++++++++++-------
.../modules/comp_scheduler/_dask_scheduler.py | 2 +-
.../comp_scheduler/_scheduler_factory.py | 52 ++--
.../modules/comp_scheduler/_task.py | 51 ---
.../modules/redis.py | 6 +
services/director-v2/tests/unit/_helpers.py | 11 -
...t_modules_comp_scheduler_dask_scheduler.py | 150 +++++----
11 files changed, 481 insertions(+), 283 deletions(-)
delete mode 100644 services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
diff --git a/packages/service-library/src/servicelib/redis_utils.py b/packages/service-library/src/servicelib/redis_utils.py
index 10f32ae5944..559349cbb0d 100644
--- a/packages/service-library/src/servicelib/redis_utils.py
+++ b/packages/service-library/src/servicelib/redis_utils.py
@@ -3,7 +3,7 @@
import logging
from collections.abc import Awaitable, Callable
from datetime import timedelta
-from typing import Any
+from typing import Any, ParamSpec, TypeVar
import arrow
@@ -12,10 +12,16 @@
_logger = logging.getLogger(__file__)
+P = ParamSpec("P")
+R = TypeVar("R")
+
def exclusive(
- redis: RedisClientSDK, *, lock_key: str, lock_value: bytes | str | None = None
-):
+ redis: RedisClientSDK | Callable[..., RedisClientSDK],
+ *,
+ lock_key: str | Callable[..., str],
+ lock_value: bytes | str | None = None,
+) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]:
"""
Define a method to run exclusively across
processes by leveraging a Redis Lock.
@@ -24,12 +30,30 @@ def exclusive(
redis: the redis client SDK
lock_key: a string as the name of the lock (good practice: app_name:lock_name)
lock_value: some additional data that can be retrieved by another client
+
+ Raises:
+ - ValueError if used incorrectly
+ - CouldNotAcquireLockError if the lock could not be acquired
"""
- def decorator(func):
+ if not lock_key:
+        msg = "lock_key cannot be an empty string!"
+ raise ValueError(msg)
+
+ def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
@functools.wraps(func)
- async def wrapper(*args, **kwargs):
- async with redis.lock_context(lock_key=lock_key, lock_value=lock_value):
+ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ redis_lock_key = (
+ lock_key(*args, **kwargs) if callable(lock_key) else lock_key
+ )
+ assert isinstance(redis_lock_key, str) # nosec
+
+ redis_client = redis(*args, **kwargs) if callable(redis) else redis
+ assert isinstance(redis_client, RedisClientSDK) # nosec
+
+ async with redis_client.lock_context(
+ lock_key=redis_lock_key, lock_value=lock_value
+ ):
return await func(*args, **kwargs)
return wrapper
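With this refactor, exclusive accepts either concrete values or per-call builders for both the Redis client and the lock key, so a bound method can derive them from self and its keyword arguments. A minimal usage sketch (the names below are hypothetical; only the decorator signature comes from this hunk):

    # Static style: one fixed client and lock key for every call.
    @exclusive(redis_client, lock_key="my_app:cleanup")
    async def cleanup() -> None:
        ...  # all callers serialize on "my_app:cleanup"

    # Builder style: client and key resolved from the call's kwargs.
    @exclusive(
        lambda *args, **kwargs: kwargs["client"],
        lock_key=lambda *args, **kwargs: f"job:{kwargs['job_id']}",
    )
    async def run_job(*, client: RedisClientSDK, job_id: str) -> None:
        ...  # concurrent calls sharing a job_id raise CouldNotAcquireLockError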
diff --git a/packages/service-library/tests/test_redis_utils.py b/packages/service-library/tests/test_redis_utils.py
index f897fc7c399..26f749cd894 100644
--- a/packages/service-library/tests/test_redis_utils.py
+++ b/packages/service-library/tests/test_redis_utils.py
@@ -5,6 +5,7 @@
from contextlib import AbstractAsyncContextManager
from datetime import timedelta
from itertools import chain
+from typing import Awaitable
from unittest.mock import Mock
import arrow
@@ -32,39 +33,117 @@ async def _is_locked(redis_client_sdk: RedisClientSDK, lock_name: str) -> bool:
@pytest.fixture
def lock_name(faker: Faker) -> str:
- return faker.uuid4() # type: ignore
+ return faker.pystr()
+
+
+def _exclusive_sleeping_task(
+ redis_client_sdk: RedisClientSDK | Callable[..., RedisClientSDK],
+ lock_name: str | Callable[..., str],
+ sleep_duration: float,
+) -> Callable[..., Awaitable[float]]:
+ @exclusive(redis_client_sdk, lock_key=lock_name)
+ async def _() -> float:
+ resolved_client = (
+ redis_client_sdk() if callable(redis_client_sdk) else redis_client_sdk
+ )
+ resolved_lock_name = lock_name() if callable(lock_name) else lock_name
+ assert await _is_locked(resolved_client, resolved_lock_name)
+ await asyncio.sleep(sleep_duration)
+ assert await _is_locked(resolved_client, resolved_lock_name)
+ return sleep_duration
+
+ return _
+
+
+@pytest.fixture
+def sleep_duration(faker: Faker) -> float:
+ return faker.pyfloat(positive=True, min_value=0.2, max_value=0.8)
-async def _contained_client(
+async def test_exclusive_decorator(
get_redis_client_sdk: Callable[
[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
],
lock_name: str,
- task_duration: float,
-) -> None:
- async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
- assert not await _is_locked(redis_client_sdk, lock_name)
-
- @exclusive(redis_client_sdk, lock_key=lock_name)
- async def _some_task() -> None:
- assert await _is_locked(redis_client_sdk, lock_name)
- await asyncio.sleep(task_duration)
- assert await _is_locked(redis_client_sdk, lock_name)
-
- await _some_task()
+ sleep_duration: float,
+):
- assert not await _is_locked(redis_client_sdk, lock_name)
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ redis_client, lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
-@pytest.mark.parametrize("task_duration", [0.1, 1, 2])
-async def test_exclusive_sequentially(
+async def test_exclusive_decorator_with_key_builder(
get_redis_client_sdk: Callable[
[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
],
lock_name: str,
- task_duration: float,
+ sleep_duration: float,
):
- await _contained_client(get_redis_client_sdk, lock_name, task_duration)
+ def _get_lock_name(*args, **kwargs) -> str:
+ assert args is not None
+ assert kwargs is not None
+ return lock_name
+
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ redis_client, _get_lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
+
+
+async def test_exclusive_decorator_with_client_builder(
+ get_redis_client_sdk: Callable[
+ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
+ ],
+ lock_name: str,
+ sleep_duration: float,
+):
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client:
+
+ def _get_redis_client_builder(*args, **kwargs) -> RedisClientSDK:
+ assert args is not None
+ assert kwargs is not None
+ return redis_client
+
+ for _ in range(3):
+ assert (
+ await _exclusive_sleeping_task(
+ _get_redis_client_builder, lock_name, sleep_duration
+ )()
+ == sleep_duration
+ )
+
+
+async def _acquire_lock_and_exclusively_sleep(
+ get_redis_client_sdk: Callable[
+ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]
+ ],
+ lock_name: str | Callable[..., str],
+ sleep_duration: float,
+) -> None:
+ async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
+ redis_lock_name = lock_name() if callable(lock_name) else lock_name
+ assert not await _is_locked(redis_client_sdk, redis_lock_name)
+
+ @exclusive(redis_client_sdk, lock_key=lock_name)
+ async def _() -> float:
+ assert await _is_locked(redis_client_sdk, redis_lock_name)
+ await asyncio.sleep(sleep_duration)
+ assert await _is_locked(redis_client_sdk, redis_lock_name)
+ return sleep_duration
+
+ assert await _() == sleep_duration
+
+ assert not await _is_locked(redis_client_sdk, redis_lock_name)
async def test_exclusive_parallel_lock_is_released_and_reacquired(
@@ -76,17 +155,19 @@ async def test_exclusive_parallel_lock_is_released_and_reacquired(
parallel_tasks = 10
results = await logged_gather(
*[
- _contained_client(get_redis_client_sdk, lock_name, task_duration=0.1)
+ _acquire_lock_and_exclusively_sleep(
+ get_redis_client_sdk, lock_name, sleep_duration=0.1
+ )
for _ in range(parallel_tasks)
],
- reraise=False
+ reraise=False,
)
assert results.count(None) == 1
assert [isinstance(x, CouldNotAcquireLockError) for x in results].count(
True
) == parallel_tasks - 1
- # check lock is being released
+ # check lock is released
async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk:
assert not await _is_locked(redis_client_sdk, lock_name)
@@ -168,7 +249,7 @@ async def test_start_exclusive_periodic_task_parallel_all_finish(
_assert_task_completes_once(get_redis_client_sdk, stop_after=60)
for _ in range(parallel_tasks)
],
- reraise=False
+ reraise=False,
)
# check no error occurred
diff --git a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
index a0903608789..aa01af1f34b 100644
--- a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py
@@ -1,3 +1,5 @@
+from typing import Annotated
+
from fastapi import Depends, FastAPI, Request
from ...core.settings import ComputationalBackendSettings
@@ -11,7 +13,7 @@ def get_scheduler(request: Request) -> BaseCompScheduler:
def get_scheduler_settings(
- app: FastAPI = Depends(get_app),
+ app: Annotated[FastAPI, Depends(get_app)]
) -> ComputationalBackendSettings:
settings: ComputationalBackendSettings = (
app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
index 1eb6c3dab10..d06c37457b7 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py
@@ -1,7 +1,38 @@
+import logging
+from collections.abc import Callable, Coroutine
+from typing import Any, cast
+
from fastapi import FastAPI
+from servicelib.logging_utils import log_context
+from . import _scheduler_factory
from ._base_scheduler import BaseCompScheduler
-from ._task import on_app_shutdown, on_app_startup
+
+_logger = logging.getLogger(__name__)
+
+
+def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
+ async def start_scheduler() -> None:
+ with log_context(
+ _logger, level=logging.INFO, msg="starting computational scheduler"
+ ):
+ app.state.scheduler = scheduler = await _scheduler_factory.create_from_db(
+ app
+ )
+ scheduler.recover_scheduling()
+
+ return start_scheduler
+
+
+def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
+ async def stop_scheduler() -> None:
+ await get_scheduler(app).shutdown()
+
+ return stop_scheduler
+
+
+def get_scheduler(app: FastAPI) -> BaseCompScheduler:
+ return cast(BaseCompScheduler, app.state.scheduler)
def setup(app: FastAPI):
@@ -12,4 +43,5 @@ def setup(app: FastAPI):
__all__: tuple[str, ...] = (
"setup",
"BaseCompScheduler",
+ "get_scheduler",
)
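With _task.py deleted later in this patch, these factories become the package's public lifecycle surface; setup (whose body is elided from the hunk above) presumably just registers them on the FastAPI app. A hedged sketch of that wiring:

    # Hypothetical body of `setup`; the actual implementation is not shown here.
    def setup(app: FastAPI) -> None:
        app.add_event_handler("startup", on_app_startup(app))
        app.add_event_handler("shutdown", on_app_shutdown(app))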
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
index cae539596d4..097afd95288 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py
@@ -12,7 +12,9 @@
"""
import asyncio
+import contextlib
import datetime
+import functools
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
@@ -29,9 +31,12 @@
from models_library.users import UserID
from networkx.classes.reportviews import InDegreeView
from pydantic import PositiveInt
+from servicelib.background_task import start_periodic_task, stop_periodic_task
from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE
+from servicelib.logging_utils import log_context
from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient
-from servicelib.utils import limited_gather
+from servicelib.redis import CouldNotAcquireLockError, RedisClientSDK
+from servicelib.redis_utils import exclusive
from ...constants import UNDEFINED_STR_METADATA
from ...core.errors import (
@@ -76,6 +81,10 @@
_Previous = CompTaskAtDB
_Current = CompTaskAtDB
_MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN: Final[int] = 10
+_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(seconds=5)
+_TASK_NAME_TEMPLATE: Final[
+ str
+] = "computational-scheduler-{user_id}:{project_id}:{iteration}"
@dataclass(frozen=True, slots=True)
@@ -134,6 +143,12 @@ class ScheduledPipelineParams:
mark_for_cancellation: datetime.datetime | None
use_on_demand_clusters: bool
+ scheduler_task: asyncio.Task | None = None
+ scheduler_waker: asyncio.Event = field(default_factory=asyncio.Event)
+
+ def wake_up(self) -> None:
+ self.scheduler_waker.set()
+
@dataclass
class BaseCompScheduler(ABC):
@@ -146,6 +161,7 @@ class BaseCompScheduler(ABC):
rabbitmq_rpc_client: RabbitMQRPCClient
settings: ComputationalBackendSettings
service_runtime_heartbeat_interval: datetime.timedelta
+ redis_client: RedisClientSDK
async def run_new_pipeline(
self,
@@ -178,7 +194,7 @@ async def run_new_pipeline(
)
self.scheduled_pipelines[
(user_id, project_id, new_run.iteration)
- ] = ScheduledPipelineParams(
+ ] = pipeline_params = ScheduledPipelineParams(
cluster_id=cluster_id,
run_metadata=new_run.metadata,
use_on_demand_clusters=use_on_demand_clusters,
@@ -191,8 +207,8 @@ async def run_new_pipeline(
log=f"Project pipeline scheduled using {'on-demand clusters' if use_on_demand_clusters else 'pre-defined clusters'}, starting soon...",
log_level=logging.INFO,
)
- # ensure the scheduler starts right away
- self._wake_up_scheduler_now()
+
+ self._start_scheduling(pipeline_params, user_id, project_id, new_run.iteration)
async def stop_pipeline(
self, user_id: UserID, project_id: ProjectID, iteration: int | None = None
@@ -224,29 +240,76 @@ async def stop_pipeline(
(user_id, project_id, selected_iteration)
].mark_for_cancellation = updated_comp_run.cancelled
# ensure the scheduler starts right away
- self._wake_up_scheduler_now()
+ self.scheduled_pipelines[
+ (user_id, project_id, selected_iteration)
+ ].wake_up()
- async def schedule_all_pipelines(self) -> None:
- self.wake_up_event.clear()
- # if one of the task throws, the other are NOT cancelled which is what we want
- await limited_gather(
+ def recover_scheduling(self) -> None:
+ for (
+ user_id,
+ project_id,
+ iteration,
+ ), params in self.scheduled_pipelines.items():
+ self._start_scheduling(params, user_id, project_id, iteration)
+
+ async def shutdown(self) -> None:
+ # cancel all current scheduling processes
+ await asyncio.gather(
*(
- self._schedule_pipeline(
+ stop_periodic_task(p.scheduler_task, timeout=3)
+ for p in self.scheduled_pipelines.values()
+ if p.scheduler_task
+ ),
+ return_exceptions=True,
+ )
+
+ def _get_last_iteration(self, user_id: UserID, project_id: ProjectID) -> Iteration:
+        # if no iteration is given, find the latest one in the list
+ possible_iterations = {
+ it
+ for u_id, p_id, it in self.scheduled_pipelines
+ if u_id == user_id and p_id == project_id
+ }
+ if not possible_iterations:
+            msg = f"There are no pipelines scheduled for {user_id}:{project_id}"
+ raise SchedulerError(msg)
+ return max(possible_iterations)
+
+ def _start_scheduling(
+ self,
+ pipeline_params: ScheduledPipelineParams,
+ user_id: UserID,
+ project_id: ProjectID,
+ iteration: Iteration,
+ ) -> None:
+ async def _exclusive_safe_schedule_pipeline(
+ *,
+ user_id: UserID,
+ project_id: ProjectID,
+ iteration: Iteration,
+ pipeline_params: ScheduledPipelineParams,
+ ) -> None:
+ with contextlib.suppress(CouldNotAcquireLockError):
+ await self._schedule_pipeline(
user_id=user_id,
project_id=project_id,
iteration=iteration,
pipeline_params=pipeline_params,
)
- for (
- user_id,
- project_id,
- iteration,
- ), pipeline_params in self.scheduled_pipelines.items()
+
+ pipeline_params.scheduler_task = start_periodic_task(
+ functools.partial(
+ _exclusive_safe_schedule_pipeline,
+ user_id=user_id,
+ project_id=project_id,
+ iteration=iteration,
+ pipeline_params=pipeline_params,
+ ),
+ interval=_SCHEDULER_INTERVAL,
+ task_name=_TASK_NAME_TEMPLATE.format(
+ user_id=user_id, project_id=project_id, iteration=iteration
),
- reraise=False,
- log=_logger,
- limit=40,
- tasks_group_prefix="computational-scheduled-pipeline",
+ early_wake_up_event=pipeline_params.scheduler_waker,
)
async def _get_pipeline_dag(self, project_id: ProjectID) -> nx.DiGraph:
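Each pipeline now owns its own periodic task plus an asyncio.Event waker, so wake_up() shortens the 5-second _SCHEDULER_INTERVAL for just that pipeline instead of waking one global loop. A minimal sketch of the early-wake-up pattern that start_periodic_task presumably implements (servicelib internals are an assumption here):

    # Sketch: sleep up to `interval`, but resume immediately if the waker fires.
    import asyncio
    import contextlib
    import datetime

    async def periodic(run, interval: datetime.timedelta, waker: asyncio.Event) -> None:
        while True:
            await run()
            with contextlib.suppress(asyncio.TimeoutError):
                await asyncio.wait_for(waker.wait(), timeout=interval.total_seconds())
            waker.clear()  # re-arm so the next wake_up() is observed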
@@ -610,6 +673,22 @@ async def _process_completed_tasks(
) -> None:
...
+ @staticmethod
+ def _build_exclusive_lock_key(*args, **kwargs) -> str:
+ assert args # nosec
+ return f"{kwargs['user_id']}:{kwargs['project_id']}:{kwargs['iteration']}"
+
+ @staticmethod
+ def _redis_client_getter(*args, **kwargs) -> RedisClientSDK:
+ assert kwargs # nosec
+ zelf = args[0]
+ assert isinstance(zelf, BaseCompScheduler) # nosec
+ return zelf.redis_client
+
+ @exclusive(
+ redis=_redis_client_getter,
+ lock_key=_build_exclusive_lock_key,
+ )
async def _schedule_pipeline(
self,
*,
@@ -618,98 +697,99 @@ async def _schedule_pipeline(
iteration: PositiveInt,
pipeline_params: ScheduledPipelineParams,
) -> None:
- _logger.debug(
- "checking run of project [%s:%s] for user [%s]",
- f"{project_id=}",
- f"{iteration=}",
- f"{user_id=}",
- )
- dag: nx.DiGraph = nx.DiGraph()
- try:
- dag = await self._get_pipeline_dag(project_id)
- # 1. Update our list of tasks with data from backend (state, results)
- await self._update_states_from_comp_backend(
- user_id, project_id, iteration, dag, pipeline_params=pipeline_params
- )
- # 2. Any task following a FAILED task shall be ABORTED
- comp_tasks = await self._set_states_following_failed_to_aborted(
- project_id, dag
- )
- # 3. do we want to stop the pipeline now?
- if pipeline_params.mark_for_cancellation:
- await self._schedule_tasks_to_stop(
- user_id, project_id, comp_tasks, pipeline_params
+ with log_context(
+ _logger,
+ level=logging.INFO,
+ msg=f"scheduling pipeline {user_id=}:{project_id=}:{iteration=}",
+ ):
+ dag: nx.DiGraph = nx.DiGraph()
+ try:
+ dag = await self._get_pipeline_dag(project_id)
+ # 1. Update our list of tasks with data from backend (state, results)
+ await self._update_states_from_comp_backend(
+ user_id, project_id, iteration, dag, pipeline_params=pipeline_params
)
- else:
- # let's get the tasks to schedule then
- comp_tasks = await self._schedule_tasks_to_start(
- user_id=user_id,
- project_id=project_id,
- comp_tasks=comp_tasks,
- dag=dag,
- pipeline_params=pipeline_params,
+ # 2. Any task following a FAILED task shall be ABORTED
+ comp_tasks = await self._set_states_following_failed_to_aborted(
+ project_id, dag
+ )
+ # 3. do we want to stop the pipeline now?
+ if pipeline_params.mark_for_cancellation:
+ await self._schedule_tasks_to_stop(
+ user_id, project_id, comp_tasks, pipeline_params
+ )
+ else:
+ # let's get the tasks to schedule then
+ comp_tasks = await self._schedule_tasks_to_start(
+ user_id=user_id,
+ project_id=project_id,
+ comp_tasks=comp_tasks,
+ dag=dag,
+ pipeline_params=pipeline_params,
+ )
+ # 4. timeout if waiting for cluster has been there for more than X minutes
+ comp_tasks = await self._timeout_if_waiting_for_cluster_too_long(
+ user_id, project_id, comp_tasks
+ )
+ # 5. send a heartbeat
+ await self._send_running_tasks_heartbeat(
+ user_id, project_id, iteration, dag
)
- # 4. timeout if waiting for cluster has been there for more than X minutes
- comp_tasks = await self._timeout_if_waiting_for_cluster_too_long(
- user_id, project_id, comp_tasks
- )
- # 5. send a heartbeat
- await self._send_running_tasks_heartbeat(
- user_id, project_id, iteration, dag
- )
- # 6. Update the run result
- pipeline_result = await self._update_run_result_from_tasks(
- user_id, project_id, iteration, comp_tasks
- )
+ # 6. Update the run result
+ pipeline_result = await self._update_run_result_from_tasks(
+ user_id, project_id, iteration, comp_tasks
+ )
- # 7. Are we done scheduling that pipeline?
- if not dag.nodes() or pipeline_result in COMPLETED_STATES:
- # there is nothing left, the run is completed, we're done here
+ # 7. Are we done scheduling that pipeline?
+ if not dag.nodes() or pipeline_result in COMPLETED_STATES:
+ # there is nothing left, the run is completed, we're done here
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ _logger.info(
+ "pipeline %s scheduling completed with result %s",
+ f"{project_id=}",
+ f"{pipeline_result=}",
+ )
+ assert pipeline_params.scheduler_task is not None # nosec
+ pipeline_params.scheduler_task.cancel()
+ except PipelineNotFoundError:
+ _logger.warning(
+ "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler",
+ f"{project_id=}",
+ )
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.ABORTED
+ )
self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- _logger.info(
- "pipeline %s scheduling completed with result %s",
+ except InvalidPipelineError as exc:
+ _logger.warning(
+ "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s",
f"{project_id=}",
- f"{pipeline_result=}",
+ exc,
)
- except PipelineNotFoundError:
- _logger.warning(
- "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler",
- f"{project_id=}",
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.ABORTED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except InvalidPipelineError as exc:
- _logger.warning(
- "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s",
- f"{project_id=}",
- exc,
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.ABORTED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError):
- _logger.exception(
- "Unexpected error while connecting with computational backend, aborting pipeline"
- )
- tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks(
- project_id, dag
- )
- comp_tasks_repo = CompTasksRepository(self.db_engine)
- await comp_tasks_repo.update_project_tasks_state(
- project_id,
- [t.node_id for t in tasks.values()],
- RunningState.FAILED,
- )
- await self._set_run_result(
- user_id, project_id, iteration, RunningState.FAILED
- )
- self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
- except ComputationalBackendNotConnectedError:
- _logger.exception("Computational backend is not connected!")
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.ABORTED
+ )
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError):
+ _logger.exception(
+ "Unexpected error while connecting with computational backend, aborting pipeline"
+ )
+ tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks(
+ project_id, dag
+ )
+ comp_tasks_repo = CompTasksRepository(self.db_engine)
+ await comp_tasks_repo.update_project_tasks_state(
+ project_id,
+ [t.node_id for t in tasks.values()],
+ RunningState.FAILED,
+ )
+ await self._set_run_result(
+ user_id, project_id, iteration, RunningState.FAILED
+ )
+ self.scheduled_pipelines.pop((user_id, project_id, iteration), None)
+ except ComputationalBackendNotConnectedError:
+ _logger.exception("Computational backend is not connected!")
async def _schedule_tasks_to_stop(
self,
@@ -910,6 +990,3 @@ async def _timeout_if_waiting_for_cluster_too_long(
log_level=logging.ERROR,
)
return comp_tasks
-
- def _wake_up_scheduler_now(self) -> None:
- self.wake_up_event.set()
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
index 51fb3b1a3fb..512df1b1712 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py
@@ -124,7 +124,7 @@ async def _start_tasks(
cluster_id=pipeline_params.cluster_id,
tasks={node_id: task.image},
hardware_info=task.hardware_info,
- callback=self._wake_up_scheduler_now,
+ callback=pipeline_params.wake_up,
metadata=pipeline_params.run_metadata,
)
for node_id, task in scheduled_tasks.items()
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
index f8b648eaf48..4f7812816cc 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py
@@ -2,6 +2,8 @@
from fastapi import FastAPI
from models_library.clusters import DEFAULT_CLUSTER_ID
+from servicelib.logging_utils import log_context
+from settings_library.redis import RedisDatabase
from ...core.errors import ConfigurationError
from ...core.settings import AppSettings
@@ -10,10 +12,11 @@
from ..dask_clients_pool import DaskClientsPool
from ..db.repositories.comp_runs import CompRunsRepository
from ..rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_client
+from ..redis import get_redis_client_manager
from ._base_scheduler import BaseCompScheduler, ScheduledPipelineParams
from ._dask_scheduler import DaskScheduler
-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
async def create_from_db(app: FastAPI) -> BaseCompScheduler:
@@ -28,29 +31,32 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler:
filter_by_state=SCHEDULED_STATES
)
- logger.debug(
+ _logger.debug(
"Following scheduled comp_runs found still to be scheduled: %s",
runs if runs else "NONE",
)
- logger.info("Creating Dask-based scheduler...")
- app_settings: AppSettings = app.state.settings
- return DaskScheduler(
- settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND,
- dask_clients_pool=DaskClientsPool.instance(app),
- rabbitmq_client=get_rabbitmq_client(app),
- rabbitmq_rpc_client=get_rabbitmq_rpc_client(app),
- db_engine=db_engine,
- scheduled_pipelines={
- (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams(
- cluster_id=(
- r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID
- ),
- run_metadata=r.metadata,
- mark_for_cancellation=r.cancelled,
- use_on_demand_clusters=r.use_on_demand_clusters,
- )
- for r in runs
- },
- service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT,
- )
+ with log_context(
+ _logger, logging.INFO, msg="Creating Dask-based computational scheduler"
+ ):
+ app_settings: AppSettings = app.state.settings
+ return DaskScheduler(
+ settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND,
+ dask_clients_pool=DaskClientsPool.instance(app),
+ rabbitmq_client=get_rabbitmq_client(app),
+ rabbitmq_rpc_client=get_rabbitmq_rpc_client(app),
+ redis_client=get_redis_client_manager(app).client(RedisDatabase.LOCKS),
+ db_engine=db_engine,
+ scheduled_pipelines={
+ (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams(
+ cluster_id=(
+ r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID
+ ),
+ run_metadata=r.metadata,
+ mark_for_cancellation=r.cancelled,
+ use_on_demand_clusters=r.use_on_demand_clusters,
+ )
+ for r in runs
+ },
+ service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT,
+ )
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
deleted file mode 100644
index 989b310687c..00000000000
--- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import datetime
-import logging
-from collections.abc import Callable, Coroutine
-from typing import Any, Final
-
-from fastapi import FastAPI
-from servicelib.background_task import start_periodic_task, stop_periodic_task
-from servicelib.logging_utils import log_context
-from servicelib.redis import RedisClientsManager
-from servicelib.redis_utils import exclusive
-from settings_library.redis import RedisDatabase
-
-from ..._meta import APP_NAME
-from . import _scheduler_factory
-
-_logger = logging.getLogger(__name__)
-
-_COMPUTATIONAL_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(
- seconds=5
-)
-_TASK_NAME: Final[str] = "computational services scheduler"
-
-
-def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
- async def start_scheduler() -> None:
- with log_context(
- _logger, level=logging.INFO, msg="starting computational scheduler"
- ):
- redis_clients_manager: RedisClientsManager = app.state.redis_clients_manager
- lock_key = f"{APP_NAME}:computational_scheduler"
- app.state.scheduler = scheduler = await _scheduler_factory.create_from_db(
- app
- )
- app.state.computational_scheduler_task = start_periodic_task(
- exclusive(
- redis_clients_manager.client(RedisDatabase.LOCKS),
- lock_key=lock_key,
- )(scheduler.schedule_all_pipelines),
- interval=_COMPUTATIONAL_SCHEDULER_INTERVAL,
- task_name=_TASK_NAME,
- early_wake_up_event=scheduler.wake_up_event,
- )
-
- return start_scheduler
-
-
-def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]:
- async def stop_scheduler() -> None:
- await stop_periodic_task(app.state.computational_scheduler_task)
-
- return stop_scheduler
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
index e7da01afef7..273061cb188 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py
@@ -1,3 +1,5 @@
+from typing import cast
+
from fastapi import FastAPI
from servicelib.redis import RedisClientsManager, RedisManagerDBConfig
from settings_library.redis import RedisDatabase
@@ -29,3 +31,7 @@ async def on_shutdown() -> None:
app.add_event_handler("startup", on_startup)
app.add_event_handler("shutdown", on_shutdown)
+
+
+def get_redis_client_manager(app: FastAPI) -> RedisClientsManager:
+ return cast(RedisClientsManager, app.state.redis_clients_manager)
diff --git a/services/director-v2/tests/unit/_helpers.py b/services/director-v2/tests/unit/_helpers.py
index 2654c63a3e1..779d6cdd117 100644
--- a/services/director-v2/tests/unit/_helpers.py
+++ b/services/director-v2/tests/unit/_helpers.py
@@ -1,4 +1,3 @@
-import asyncio
from dataclasses import dataclass
from typing import Any
@@ -11,9 +10,6 @@
from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB
from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB
-from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
- BaseCompScheduler,
-)
@dataclass
@@ -28,13 +24,6 @@ class RunningProject(PublishedProject):
runs: CompRunsAtDB
-async def trigger_comp_scheduler(scheduler: BaseCompScheduler) -> None:
- # trigger the scheduler
- scheduler._wake_up_scheduler_now() # pylint: disable=protected-access # noqa: SLF001
- # let the scheduler be actually triggered
- await asyncio.sleep(1)
-
-
async def set_comp_task_state(
aiopg_engine: aiopg.sa.engine.Engine, node_id: str, state: StateType
) -> None:
diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
index f9e5ff33c4b..1df1ae09d39 100644
--- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
+++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py
@@ -45,6 +45,7 @@
from pytest_mock.plugin import MockerFixture
from pytest_simcore.helpers.typing_env import EnvVarsDict
from servicelib.rabbitmq import RabbitMQClient
+from servicelib.redis import CouldNotAcquireLockError
from settings_library.rabbit import RabbitSettings
from settings_library.redis import RedisSettings
from simcore_postgres_database.models.comp_runs import comp_runs
@@ -66,8 +67,12 @@
from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict
from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image
from simcore_service_director_v2.models.dask_subsystem import DaskClientTaskState
-from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
+from simcore_service_director_v2.modules.comp_scheduler import (
BaseCompScheduler,
+ get_scheduler,
+)
+from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import (
+ ScheduledPipelineParams,
)
from simcore_service_director_v2.modules.comp_scheduler._dask_scheduler import (
DaskScheduler,
@@ -155,8 +160,38 @@ async def _assert_comp_tasks_db(
), f"{expected_progress=}, found: {[t.progress for t in tasks]}"
-async def run_comp_scheduler(scheduler: BaseCompScheduler) -> None:
- await scheduler.schedule_all_pipelines()
+async def schedule_all_pipelines(scheduler: BaseCompScheduler) -> None:
+ # NOTE: we take a copy of the pipelines, as this could change quickly if there are
+ # misconfigured pipelines that would be removed from the scheduler
+    # NOTE: we simulate multiple dv-2 replicas by running the same pipeline
+    # scheduling several times
+ local_pipelines = deepcopy(scheduler.scheduled_pipelines)
+ results = await asyncio.gather(
+ *(
+ scheduler._schedule_pipeline( # noqa: SLF001
+ user_id=user_id,
+ project_id=project_id,
+ iteration=iteration,
+ pipeline_params=params,
+ )
+ for _ in range(3)
+ for (
+ user_id,
+ project_id,
+ iteration,
+ ), params in local_pipelines.items()
+ ),
+ return_exceptions=True,
+ )
+ # we should have exceptions 2/3 of the time
+ could_not_acquire_lock_count = sum(
+ isinstance(r, CouldNotAcquireLockError) for r in results
+ )
+ total_results_count = len(results)
+
+ # Check if 2/3 of the results are CouldNotAcquireLockError
+ # checks that scheduling is done exclusively
+ assert could_not_acquire_lock_count == (2 / 3) * total_results_count
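The exact 2/3 ratio follows from the lock being keyed per (user_id, project_id, iteration): of the three concurrent attempts per pipeline, exactly one acquires the lock and the other two fail. A worked example:

    # 2 pipelines x 3 concurrent attempts = 6 results:
    #   2 succeed (one per pipeline), 4 raise CouldNotAcquireLockError,
    #   and 4 == (2 / 3) * 6 holds exactly (the total is always a multiple of 3).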
@pytest.fixture
@@ -185,11 +220,11 @@ def minimal_dask_scheduler_config(
def scheduler(
minimal_dask_scheduler_config: None,
aiopg_engine: aiopg.sa.engine.Engine,
- # dask_spec_local_cluster: SpecCluster,
minimal_app: FastAPI,
) -> BaseCompScheduler:
- assert minimal_app.state.scheduler is not None
- return minimal_app.state.scheduler
+ scheduler = get_scheduler(minimal_app)
+ assert scheduler is not None
+ return scheduler
@pytest.fixture
@@ -220,16 +255,21 @@ def mocked_clean_task_output_fct(mocker: MockerFixture) -> mock.MagicMock:
@pytest.fixture
-def with_disabled_scheduler_task(mocker: MockerFixture) -> None:
+def with_disabled_auto_scheduling(mocker: MockerFixture) -> mock.MagicMock:
"""disables the scheduler task, note that it needs to be triggered manually then"""
- mocker.patch(
- "simcore_service_director_v2.modules.comp_scheduler._task.start_periodic_task",
- autospec=True,
- )
- mocker.patch(
- "simcore_service_director_v2.modules.comp_scheduler._task.stop_periodic_task",
+ def _fake_starter(
+ self: BaseCompScheduler,
+ pipeline_params: ScheduledPipelineParams,
+ *args,
+ **kwargs,
+ ) -> None:
+ pipeline_params.scheduler_task = mocker.MagicMock()
+
+ return mocker.patch(
+ "simcore_service_director_v2.modules.comp_scheduler._base_scheduler.BaseCompScheduler._start_scheduling",
autospec=True,
+ side_effect=_fake_starter,
)
@@ -258,7 +298,7 @@ async def test_scheduler_gracefully_starts_and_stops(
minimal_app: FastAPI,
):
# check it started correctly
- assert minimal_app.state.computational_scheduler_task is not None
+ assert get_scheduler(minimal_app) is not None
@pytest.mark.parametrize(
@@ -287,7 +327,7 @@ def test_scheduler_raises_exception_for_missing_dependencies(
async def test_empty_pipeline_is_not_scheduled(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
registered_user: Callable[..., dict[str, Any]],
project: Callable[..., Awaitable[ProjectAtDB]],
@@ -319,9 +359,6 @@ async def test_empty_pipeline_is_not_scheduled(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 0
- assert (
- scheduler.wake_up_event.is_set() is False
- ), "the scheduler was woken up on an empty pipeline!"
# check the database is empty
async with aiopg_engine.acquire() as conn:
result = await conn.scalar(
@@ -334,7 +371,7 @@ async def test_empty_pipeline_is_not_scheduled(
async def test_misconfigured_pipeline_is_not_scheduled(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
registered_user: Callable[..., dict[str, Any]],
project: Callable[..., Awaitable[ProjectAtDB]],
@@ -361,9 +398,6 @@ async def test_misconfigured_pipeline_is_not_scheduled(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 1
- assert (
- scheduler.wake_up_event.is_set() is True
- ), "the scheduler was NOT woken up on the scheduled pipeline!"
for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items():
assert u_id == user["id"]
assert p_id == sleepers_project.uuid
@@ -380,7 +414,7 @@ async def test_misconfigured_pipeline_is_not_scheduled(
run_entry = CompRunsAtDB.parse_obj(await result.first())
assert run_entry.result == RunningState.PUBLISHED
# let the scheduler kick in
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# check the scheduled pipelines is again empty since it's misconfigured
assert len(scheduler.scheduled_pipelines) == 0
# check the database entry is correctly updated
@@ -412,9 +446,6 @@ async def _assert_start_pipeline(
use_on_demand_clusters=False,
)
assert len(scheduler.scheduled_pipelines) == 1, "the pipeline is not scheduled!"
- assert (
- scheduler.wake_up_event.is_set() is True
- ), "the scheduler was NOT woken up on the scheduled pipeline!"
for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items():
assert u_id == published_project.project.prj_owner
assert p_id == published_project.project.uuid
@@ -434,7 +465,7 @@ async def _assert_start_pipeline(
return exp_published_tasks
-async def _assert_schedule_pipeline_PENDING(
+async def _assert_schedule_pipeline_PENDING( # noqa: N802
aiopg_engine,
published_project: PublishedProject,
published_tasks: list[CompTaskAtDB],
@@ -452,7 +483,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
return [DaskClientTaskState.PENDING for job_id in job_ids]
mocked_dask_client.get_tasks_status.side_effect = _return_tasks_pending
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
_assert_dask_client_correctly_initialized(mocked_dask_client, scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PUBLISHED)
await _assert_comp_tasks_db(
@@ -471,6 +502,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
expected_progress=None, # since we bypass the API entrypoint this is correct
)
# tasks were send to the backend
+ assert published_project.project.prj_owner is not None
mocked_dask_client.send_computation_tasks.assert_has_calls(
calls=[
mock.call(
@@ -478,7 +510,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
project_id=published_project.project.uuid,
cluster_id=DEFAULT_CLUSTER_ID,
tasks={f"{p.node_id}": p.image},
- callback=scheduler._wake_up_scheduler_now, # noqa: SLF001
+ callback=mock.ANY,
metadata=mock.ANY,
hardware_info=mock.ANY,
)
@@ -490,7 +522,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState]
mocked_dask_client.get_tasks_status.assert_not_called()
mocked_dask_client.get_task_result.assert_not_called()
# there is a second run of the scheduler to move comp_runs to pending, the rest does not change
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -616,7 +648,7 @@ async def _trigger_progress_event(
@pytest.mark.acceptance_test()
async def test_proper_pipeline_is_scheduled( # noqa: PLR0915
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -661,7 +693,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_running
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING)
await _assert_comp_tasks_db(
@@ -707,7 +739,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# comp_run, the comp_task switch to STARTED
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
@@ -771,7 +803,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData:
return TaskOutputData.parse_obj({"out_1": None, "out_2": 45})
mocked_dask_client.get_task_result.side_effect = _return_random_task_result
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -819,7 +851,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData:
tasks={
f"{next_pending_task.node_id}": next_pending_task.image,
},
- callback=scheduler._wake_up_scheduler_now, # noqa: SLF001
+ callback=mock.ANY,
metadata=mock.ANY,
hardware_info=mock.ANY,
)
@@ -866,7 +898,7 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
project_id=exp_started_task.project_id,
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -908,7 +940,7 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat
mocked_dask_client.get_tasks_status.side_effect = _return_2nd_task_failed
mocked_dask_client.get_task_result.side_effect = None
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED)
await _assert_comp_tasks_db(
aiopg_engine,
@@ -955,7 +987,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta
mocked_dask_client.get_task_result.side_effect = _return_random_task_result
# trigger the scheduler; the run should switch to FAILED, as we are done
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED)
await _assert_comp_tasks_db(
@@ -991,7 +1023,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta
async def test_task_progress_triggers(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -1054,7 +1086,7 @@ async def test_task_progress_triggers(
],
)
async def test_handling_of_disconnected_dask_scheduler(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
@@ -1098,7 +1130,7 @@ async def test_handling_of_disconnected_dask_scheduler(
project_id=published_project.project.uuid,
)
# we ensure the scheduler was run
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# after this step the tasks are marked as ABORTED
await _assert_comp_tasks_db(
aiopg_engine,
@@ -1112,7 +1144,7 @@ async def test_handling_of_disconnected_dask_scheduler(
expected_progress=1,
)
# then we have another scheduler run
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# now the run should be ABORTED
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.ABORTED)
@@ -1197,7 +1229,7 @@ class RebootState:
],
)
async def test_handling_scheduling_after_reboot(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
aiopg_engine: aiopg.sa.engine.Engine,
running_project: RunningProject,
@@ -1222,7 +1254,7 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData:
mocked_dask_client.get_task_result.side_effect = mocked_get_task_result
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# get_tasks_status is called exactly once, covering all RUNNING tasks
mocked_dask_client.get_tasks_status.assert_called_once()
if reboot_state.expected_run_state in COMPLETED_STATES:
@@ -1279,7 +1311,7 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData:
async def test_handling_cancellation_of_jobs_after_reboot(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
mocked_dask_client: mock.MagicMock,
aiopg_engine: aiopg.sa.engine.Engine,
running_project_mark_for_cancellation: RunningProject,
@@ -1309,7 +1341,7 @@ async def mocked_get_tasks_status(job_ids: list[str]) -> list[DaskClientTaskStat
mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status
# running the scheduler should now actually cancel the run
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_dask_client.abort_computation_task.assert_called()
assert mocked_dask_client.abort_computation_task.call_count == len(
[
@@ -1346,7 +1378,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData:
raise TaskCancelledError
mocked_dask_client.get_task_result.side_effect = _return_random_task_result
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
# now the tasks should be stopped
await _assert_comp_tasks_db(
aiopg_engine,
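
The reboot tests above build on the new `running_project_mark_for_cancellation` fixture, i.e. a run whose cancellation request was already persisted when the scheduler comes back up. A hedged sketch of how such a fixture could persist that mark, assuming the `comp_runs` table with its new nullable `cancelled` timestamp and a `run_id` primary key; the helper name is hypothetical:

```python
import datetime

from simcore_postgres_database.models.comp_runs import comp_runs

# hypothetical fixture helper: persist the cancellation request in the DB so
# that it survives a scheduler reboot
async def mark_run_for_cancellation(aiopg_engine, run_id: int) -> None:
    async with aiopg_engine.acquire() as conn:
        await conn.execute(
            comp_runs.update()
            .where(comp_runs.c.run_id == run_id)  # run_id assumed to be the PK
            .values(cancelled=datetime.datetime.now(datetime.timezone.utc))
        )
```
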
@@ -1373,7 +1405,7 @@ def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int:
async def test_running_pipeline_triggers_heartbeat(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
with_fast_service_heartbeat_s: int,
mocked_dask_client: mock.MagicMock,
scheduler: BaseCompScheduler,
@@ -1420,7 +1452,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
project_id=exp_started_task.project_id,
node_id=exp_started_task.node_id,
)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
@@ -1432,8 +1464,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
# -------------------------------------------------------------------------------
# 3. wait a bit and run again: we should get another heartbeat, but only one!
await asyncio.sleep(with_fast_service_heartbeat_s + 1)
- await run_comp_scheduler(scheduler)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
1,
@@ -1444,8 +1476,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta
# -------------------------------------------------------------------------------
# 4. wait a bit and run again: we should get another heartbeat, but only one!
await asyncio.sleep(with_fast_service_heartbeat_s + 1)
- await run_comp_scheduler(scheduler)
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
+ await schedule_all_pipelines(scheduler)
messages = await _assert_message_received(
resource_tracking_rabbit_client_parser,
1,
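
Steps 3 and 4 pin down the rate limiting: however many scheduling passes happen within one heartbeat interval, only a single heartbeat message is emitted. A minimal sketch of that invariant, using a hypothetical per-service `last_heartbeat` timestamp (the real bookkeeping lives in the scheduler):

```python
import datetime

def heartbeat_due(
    last_heartbeat: datetime.datetime,
    now: datetime.datetime,
    interval_s: int,
) -> bool:
    # emit at most one heartbeat per interval, regardless of how many
    # scheduling passes run in between
    return (now - last_heartbeat).total_seconds() >= interval_s

t0 = datetime.datetime.now(datetime.timezone.utc)
assert not heartbeat_due(t0, t0, interval_s=1)  # immediate re-run: no heartbeat
assert heartbeat_due(t0, t0 + datetime.timedelta(seconds=2), interval_s=1)
```
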
@@ -1463,7 +1495,7 @@ async def mocked_get_or_create_cluster(mocker: MockerFixture) -> mock.Mock:
async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
published_project: PublishedProject,
@@ -1501,7 +1533,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
published_project.tasks[1],
published_project.tasks[3],
]
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
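
The first scheduling pass asks clusters-keeper for the on-demand cluster exactly once and, while the backend is not ready, keeps the tasks waiting instead of failing the run. A self-contained toy of that branch; every name in it is illustrative, not the service's actual API:

```python
import asyncio

class OnDemandClusterNotReadyError(Exception):  # hypothetical exception name
    ...

async def get_or_create_on_demand_cluster() -> str:  # hypothetical helper
    raise OnDemandClusterNotReadyError  # the backend is not ready yet

async def schedule_pass() -> str:
    try:
        return await get_or_create_on_demand_cluster()
    except OnDemandClusterNotReadyError:
        # do not fail the run: keep waiting and retry on the next pass;
        # the actual RunningState name is assumed here
        return "WAITING_FOR_CLUSTER"

assert asyncio.run(schedule_pass()) == "WAITING_FOR_CLUSTER"
```
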
@@ -1516,7 +1548,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
expected_progress=None,
)
# running again triggers the same response
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
@@ -1537,7 +1569,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits(
[ClustersKeeperNotAvailableError],
)
async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
- with_disabled_scheduler_task: None,
+ with_disabled_auto_scheduling: None,
scheduler: BaseCompScheduler,
aiopg_engine: aiopg.sa.engine.Engine,
published_project: PublishedProject,
@@ -1570,7 +1602,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
published_project.tasks[1],
published_project.tasks[3],
]
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_called()
assert mocked_get_or_create_cluster.call_count == 1
mocked_get_or_create_cluster.reset_mock()
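
`ClustersKeeperNotAvailableError`, by contrast, is terminal: the first pass marks the run FAILED, and the hunk below verifies that a second pass never contacts clusters-keeper again. A toy sketch of that terminal branch, with illustrative names only:

```python
class ClustersKeeperNotAvailableError(Exception):  # mirrors the error name above
    ...

def next_pass(run_state: str) -> str:
    if run_state == "FAILED":
        # terminal state: clusters-keeper is not contacted again
        return run_state
    try:
        raise ClustersKeeperNotAvailableError  # simulate the unavailable keeper
    except ClustersKeeperNotAvailableError:
        return "FAILED"

state = next_pass("PUBLISHED")       # first pass: the run becomes FAILED
assert next_pass(state) == "FAILED"  # second pass changes nothing
```
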
@@ -1583,7 +1615,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails(
expected_progress=1.0,
)
# running again does not re-trigger the call to clusters-keeper
- await run_comp_scheduler(scheduler)
+ await schedule_all_pipelines(scheduler)
mocked_get_or_create_cluster.assert_not_called()
await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED)
await _assert_comp_tasks_db(