diff --git a/.github/workflows/destroy-preview-env.yaml b/.github/workflows/destroy-preview-env.yaml index d3a99090850..d2ffbea4588 100644 --- a/.github/workflows/destroy-preview-env.yaml +++ b/.github/workflows/destroy-preview-env.yaml @@ -22,10 +22,6 @@ on: type: string description: App Name (likely the GH repo) required: true - vpc-id: - type: string - description: VPC ID for preview envs - required: true aws-region: type: string default: us-east-1 @@ -44,7 +40,6 @@ jobs: ENV_NAME: ${{ inputs.env-name }} ENV_NAME_SNAKE: ${{ inputs.env-snake-name }} APP_NAME: ${{ inputs.app-name }} - VPC_ID: ${{ inputs.vpc-id }} COPILOT_SERVICE: ${{ inputs.copilot-service }} AWS_REGION: us-east-1 steps: diff --git a/.github/workflows/run_build.yaml b/.github/workflows/run_build.yaml index 0a1666787cf..b976d73f911 100644 --- a/.github/workflows/run_build.yaml +++ b/.github/workflows/run_build.yaml @@ -29,7 +29,7 @@ jobs: uses: actions/checkout@v4 - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v3 with: role-to-assume: arn:aws:iam::917902836630:role/cmiml-devops-oidc-github-role role-session-name: OIDC-GHA-session diff --git a/.github/workflows/run_deploy_dev.yaml b/.github/workflows/run_deploy_dev.yaml index f2b827f738b..29f2d85e627 100644 --- a/.github/workflows/run_deploy_dev.yaml +++ b/.github/workflows/run_deploy_dev.yaml @@ -47,6 +47,7 @@ jobs: run: | aws ecs describe-task-definition --task-definition ${{ env.TASK_DEFINITION }} > task-definition.json + - name: Render Amazon ECS task definition id: task-def uses: aws-actions/amazon-ecs-render-task-definition@v1 diff --git a/src/apps/library/crud.py b/src/apps/library/crud.py index 4ce1ab56260..77e8efb9943 100644 --- a/src/apps/library/crud.py +++ b/src/apps/library/crud.py @@ -45,7 +45,7 @@ async def get_all_library_count(self, query_params: QueryParams) -> int: async def get_all_library_items( self, query_params: QueryParams, - ) -> 
list[LibraryItem]: + ) -> list[LibrarySchema]: query: Query = select( LibrarySchema.id, LibrarySchema.keywords, @@ -70,8 +70,7 @@ async def get_all_library_items( query = paging(query, query_params.page, query_params.limit) results = await self._execute(query) - - return [LibraryItem.from_orm(result) for result in results.all()] + return results.all() # noqa async def get_library_item_by_id(self, id_: uuid.UUID) -> LibraryItem: query: Query = select( diff --git a/src/apps/library/db/schemas.py b/src/apps/library/db/schemas.py index 4398358cb3b..d7f6737257a 100644 --- a/src/apps/library/db/schemas.py +++ b/src/apps/library/db/schemas.py @@ -11,8 +11,8 @@ class LibrarySchema(Base): ForeignKey("applet_histories.id_version", ondelete="RESTRICT"), nullable=False, ) - keywords = Column(ARRAY(String)) - search_keywords = Column(ARRAY(String)) + keywords = Column(ARRAY(String), nullable=False, server_default="{}") + search_keywords = Column(ARRAY(String), nullable=False, server_default="{}") class CartSchema(Base): diff --git a/src/apps/library/domain.py b/src/apps/library/domain.py index 9e82b77fbdc..75f0fe0cd9e 100644 --- a/src/apps/library/domain.py +++ b/src/apps/library/domain.py @@ -1,6 +1,6 @@ import uuid -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, validator from apps.activities.domain.response_type_config import PerformanceTaskType from apps.shared.domain import InternalModel, PublicModel @@ -8,7 +8,7 @@ class AppletLibrary(InternalModel): applet_id_version: str - keywords: list[str] | None = None + keywords: list[str] class AppletLibraryFull(AppletLibrary): @@ -22,12 +22,16 @@ class AppletLibraryInfo(PublicModel): class AppletLibraryCreate(InternalModel): applet_id: uuid.UUID - keywords: list[str] | None = None + keywords: list[str] = Field(default_factory=list) name: str + @validator("keywords", pre=True) + def validate_keywords(cls, keywords: list[str] | None): + return keywords if keywords is not None else [] + class 
AppletLibraryUpdate(InternalModel): - keywords: list[str] | None = None + keywords: list[str] = Field(default_factory=list) name: str @@ -84,7 +88,7 @@ class _LibraryItem(BaseModel): about: dict[str, str] | None = None image: str = "" theme_id: uuid.UUID | None = None - keywords: list[str] | None = None + keywords: list[str] = Field(default_factory=list) activities: list[LibraryItemActivity] | None = None activity_flows: list[LibraryItemFlow] | None = None diff --git a/src/apps/library/fixtures/libraries.json b/src/apps/library/fixtures/libraries.json index 1fbec694efd..8508c52f9c5 100644 --- a/src/apps/library/fixtures/libraries.json +++ b/src/apps/library/fixtures/libraries.json @@ -7,8 +7,8 @@ "is_deleted": false, "id": "68aadd6c-eb20-4666-85aa-fd6264825c01", "applet_id_version": "92917a56-d586-4613-b7aa-991f2c4b15b2_1.1.0", - "keywords": null, - "search_keywords": null + "keywords": {}, + "search_keywords": {} } } ] diff --git a/src/apps/library/service.py b/src/apps/library/service.py index 4b2482054aa..adb5c72703e 100644 --- a/src/apps/library/service.py +++ b/src/apps/library/service.py @@ -109,7 +109,8 @@ async def get_applets_count(self, query_param: QueryParams) -> int: async def get_all_applets(self, query_params: QueryParams) -> list[PublicLibraryItem]: """Get all applets for library.""" - library_items = await LibraryCRUD(self.session).get_all_library_items(query_params) + library_schemas = await LibraryCRUD(self.session).get_all_library_items(query_params) + library_items = parse_obj_as(list[LibraryItem], library_schemas) for library_item in library_items: library_item = await self._get_full_library_item(library_item) diff --git a/src/apps/library/tests.py b/src/apps/library/tests.py index 31307651bb3..471fb4ca74f 100644 --- a/src/apps/library/tests.py +++ b/src/apps/library/tests.py @@ -461,3 +461,26 @@ async def test_library_get_url_applet_version_does_not_exists( res = resp.json()["result"] assert len(res) == 1 assert res[0]["message"] == 
AppletVersionDoesNotExistError.message + + @pytest.mark.parametrize( + "kw, exp_kw, exp_status, include_kw", + ( + (["test", "test2"], ["test", "test2"], http.HTTPStatus.CREATED, True), + ([], [], http.HTTPStatus.CREATED, True), + (None, [], http.HTTPStatus.CREATED, False), + (None, [], http.HTTPStatus.CREATED, True), + ), + ) + async def test_library_share_with_empty_kw( + self, client: TestClient, applet_one: AppletFull, tom: User, kw, exp_kw, exp_status, include_kw + ): + client.login(tom) + data = dict(applet_id=applet_one.id, name="PHQ2") + if include_kw: + data["keywords"] = kw + + response = await client.post(self.library_url, data=data) + assert response.status_code == exp_status + if exp_status == http.HTTPStatus.CREATED: + result = response.json()["result"] + assert result["keywords"] == exp_kw diff --git a/src/apps/shared/commands/patch_commands.py b/src/apps/shared/commands/patch_commands.py index 1b0434b7ba6..bb8fab59db5 100644 --- a/src/apps/shared/commands/patch_commands.py +++ b/src/apps/shared/commands/patch_commands.py @@ -46,6 +46,12 @@ description="Set proportion.enabled=True to Maki's applets", manage_session=False, ) +PatchRegister.register( + file_path="m2_6968_create_flows_old_versions.py", + task_id="M2-6968", + description="Create flow history records for particular applets", + manage_session=False, +) PatchRegister.register( file_path="m2_6879_create_deleted_respondents.py", diff --git a/src/apps/shared/commands/patches/m2_4611_add_answer_subjects.py b/src/apps/shared/commands/patches/m2_4611_add_answer_subjects.py index 34ee0796d8f..ec33cc8e685 100644 --- a/src/apps/shared/commands/patches/m2_4611_add_answer_subjects.py +++ b/src/apps/shared/commands/patches/m2_4611_add_answer_subjects.py @@ -1,3 +1,4 @@ +import asyncio import uuid from rich import print @@ -39,13 +40,16 @@ async def main( print(f"Workspace#{i + 1} DB already processed, skip...") continue processed.add(arb_uri) - session_maker = session_manager.get_session(arb_uri) - 
async with session_maker() as arb_session: - try: - await update_answers(arb_session) - await arb_session.commit() - print(f"Processing workspace#{i + 1} {workspace.id} " f"finished") - except Exception: - await arb_session.rollback() - print(f"[bold red]Workspace#{i + 1} {workspace.id} " f"processing error[/bold red]") - raise + try: + session_maker = session_manager.get_session(arb_uri) + async with session_maker() as arb_session: + try: + await update_answers(arb_session) + await arb_session.commit() + print(f"Processing workspace#{i + 1} {workspace.id} " f"finished") + except Exception: + await arb_session.rollback() + print(f"[bold red]Error: Workspace#{i + 1} {workspace.id} processing error[/bold red]") + raise + except asyncio.TimeoutError: + print(f"[bold red]Error: Workspace#{i + 1} {workspace.id} Timeout error, skipping...[/bold red]") diff --git a/src/apps/shared/commands/patches/m2_6879_create_deleted_respondents.py b/src/apps/shared/commands/patches/m2_6879_create_deleted_respondents.py index ad6bc992c2d..279014bd5b9 100644 --- a/src/apps/shared/commands/patches/m2_6879_create_deleted_respondents.py +++ b/src/apps/shared/commands/patches/m2_6879_create_deleted_respondents.py @@ -1,6 +1,8 @@ +import asyncio import os import uuid +from rich import print from sqlalchemy import func, select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Query @@ -69,10 +71,12 @@ async def get_answers_applets_respondents( async def get_missing_applet_respondent( - session: AsyncSession, owner_id: uuid.UUID, arbitrary_applet_respondents: set[tuple[uuid.UUID, uuid.UUID]] + session: AsyncSession, applet_ids: list[uuid.UUID], arbitrary_applet_respondents: set[tuple[uuid.UUID, uuid.UUID]] ) -> list[tuple[uuid.UUID, uuid.UUID]]: query: Query = select(UserAppletAccessSchema.user_id, UserAppletAccessSchema.applet_id) - query = query.where(UserAppletAccessSchema.owner_id == owner_id, UserAppletAccessSchema.role == Role.RESPONDENT) + query = query.where( + 
UserAppletAccessSchema.applet_id.in_(applet_ids), UserAppletAccessSchema.role == Role.RESPONDENT + ) db_result = await session.execute(query) roles_users_applets = db_result.all() return list(arbitrary_applet_respondents - set(roles_users_applets)) @@ -91,7 +95,12 @@ async def find_and_create_missing_roles_arbitrary( roles = [] for offset in range(0, count, limit): arbitrary_applet_respondents = await get_answers_applets_respondents(arbitrary_session, limit, offset) - missing_users_applets = await get_missing_applet_respondent(session, owner_id, arbitrary_applet_respondents) + + applet_ids = {x[1] for x in arbitrary_applet_respondents} + + missing_users_applets = await get_missing_applet_respondent( + session, list(applet_ids), arbitrary_applet_respondents + ) for user_id, applet_id in missing_users_applets: schema = UserAppletAccessSchema( user_id=user_id, @@ -130,17 +139,19 @@ async def main(session: AsyncSession, *args, **kwargs): print(f"Workspace#{i + 1} DB already processed, skip...") continue processed.add(arb_uri) - session_maker = session_manager.get_session(arb_uri) - async with session_maker() as arb_session: - try: - await find_and_create_missing_roles_arbitrary(session, arb_session, workspace.user_id) - await arb_session.commit() - print(f"Processing workspace#{i + 1} {workspace.id} " f"finished") - except Exception: - await arb_session.rollback() - print(f"[bold red]Workspace#{i + 1} {workspace.id} " f"processing error[/bold red]") - raise - + try: + session_maker = session_manager.get_session(arb_uri) + async with session_maker() as arb_session: + try: + await find_and_create_missing_roles_arbitrary(session, arb_session, workspace.user_id) + await session.commit() + print(f"Processing workspace#{i + 1} {workspace.id} " f"finished") + except Exception: + await session.rollback() + print(f"[bold red]Error: Workspace#{i + 1} {workspace.id} processing error[/bold red]") + raise + except asyncio.TimeoutError: + print(f"[bold red]Error: Workspace#{i + 1} 
{workspace.id} Timeout error, skipping...[/bold red]") except Exception as ex: await session.rollback() raise ex diff --git a/src/apps/shared/commands/patches/m2_6968_create_flows_old_versions.py b/src/apps/shared/commands/patches/m2_6968_create_flows_old_versions.py new file mode 100644 index 00000000000..dad345121d7 --- /dev/null +++ b/src/apps/shared/commands/patches/m2_6968_create_flows_old_versions.py @@ -0,0 +1,106 @@ +from rich import print +from sqlalchemy.ext.asyncio import AsyncSession + +SQL_FLOW_HISTORY_CREATE = """ + with applet_versions as ( + select + ah.id, + ah.id_version, + ah.version, + date_trunc('minute', ah.created_at) + interval '1 min' as created_at + from applet_histories ah + left join flow_histories fh on fh.applet_id = ah.id_version + where 1 = 1 + and ah.id = '{applet_id}' + and format( + '%s.%s.%s', + lpad(split_part(ah."version", '.', 1), 2, '0'), + lpad(split_part(ah."version", '.', 2), 2, '0'), + lpad(split_part(ah."version", '.', 3), 2, '0') + ) >= '{from_version_padded}' + and fh.id is null + ), + last_flow_data as ( + select f.* + from flows f + where f.applet_id = '{applet_id}' + ) + insert into flow_histories + select + av.created_at, + av.created_at as updated_at, + lf.is_deleted, + lf."name", + lf.description, + lf.is_single_report, + lf.hide_badge, + lf."order", + format('%s_%s', lf.id::text, av.version) as id_version, + av.id_version as applet_id, + lf.id, + lf.is_hidden, + lf.report_included_activity_name, + lf.report_included_item_name, + lf.extra_fields + from last_flow_data lf + cross join applet_versions av; +""" + +SQL_FLOW_ITEM_HISTORY_CREATE = """ + with applet_versions as ( + select + ah.id, + ah.id_version, + ah.version, + date_trunc('minute', ah.created_at) + interval '1 min' as created_at + from applet_histories ah + left join flow_histories fh on fh.applet_id = ah.id_version + and exists (select 1 from flow_item_histories fih where fih.activity_flow_id = fh.id_version) + where 1 = 1 + and ah.id = '{applet_id}' 
+        and format(
+            '%s.%s.%s',
+            lpad(split_part(ah."version", '.', 1), 2, '0'),
+            lpad(split_part(ah."version", '.', 2), 2, '0'),
+            lpad(split_part(ah."version", '.', 3), 2, '0')
+        ) >= '{from_version_padded}'
+        and fh.id is null
+    ),
+    last_flow_item_data as (
+        select fi.*
+        from flows f
+        join flow_items fi on fi.activity_flow_id = f.id
+        where f.applet_id = '{applet_id}'
+    )
+    insert into flow_item_histories
+    select
+        av.created_at,
+        av.created_at as updated_at,
+        lfi.is_deleted,
+        lfi."order",
+        format('%s_%s', lfi.id::text, av.version) as id_version,
+        format('%s_%s', lfi.activity_flow_id::text, av.version) as activity_flow_id,
+        format('%s_%s', lfi.activity_id::text, av.version) as activity_id,
+        lfi.id
+    from last_flow_item_data lfi
+    cross join applet_versions av;
+"""
+
+applet_versions = (
+    ("62b21984-b90b-7f2b-a9e1-c51a00000000", "10.00.00"),
+    ("7bb7b30e-0d8a-4b13-bc1c-6a733ccc689a", "02.00.00"),
+)
+
+
+async def main(session: AsyncSession, *args, **kwargs):
+    for applet_id, version_padded in applet_versions:
+        sql = SQL_FLOW_HISTORY_CREATE.format(applet_id=applet_id, from_version_padded=version_padded)
+        print("Execute:")
+        print(sql)
+        await session.execute(sql)
+        print("Done")
+        sql = SQL_FLOW_ITEM_HISTORY_CREATE.format(applet_id=applet_id, from_version_padded=version_padded)
+        print("Execute:")
+        print(sql)
+        await session.execute(sql)
+        print("Done")
diff --git a/src/apps/workspaces/commands/arbitrary_server.py b/src/apps/workspaces/commands/arbitrary_server.py
index 898e6810d3e..72011f92d3f 100644
--- a/src/apps/workspaces/commands/arbitrary_server.py
+++ b/src/apps/workspaces/commands/arbitrary_server.py
@@ -1,3 +1,4 @@
+import asyncio
 import http
 import io
 import uuid
@@ -53,8 +54,12 @@ def print_data_table(data: WorkspaceArbitraryFields) -> None:
     print(table)
 
 
+def error_msg(msg: str):
+    print(f"[bold red]Error: {msg}[/bold red]")
+
+
 def error(msg: str):
-    print(f"[bold red]Error: \n{msg}[/bold red]")
+    error_msg(msg)
     raise typer.Abort()
 
 
@@ 
-180,8 +185,14 @@ async def show( if not data: print(f"[bold green]Arbitrary settings are not configured for {owner_email}[/bold green]") return - alembic_version = await get_version(data.database_uri) arbitrary_fields = WorkspaceArbitraryFields.from_orm(data) + try: + alembic_version = await get_version(data.database_uri) + except asyncio.TimeoutError: + alembic_version = "[bold red]ERROR: Timeout[/bold red]" + except Exception as e: + alembic_version = f"[bold red]ERROR: {e}[/bold red]" + output = WorkSpaceArbitraryConsoleOutput( **arbitrary_fields.dict(), email=owner_email, user_id=owner.id, alembic_version=alembic_version ) @@ -191,8 +202,13 @@ async def show( user_crud = UsersCRUD(session) for data in workspaces: user = await user_crud.get_by_id(data.user_id) - alembic_version = await get_version(data.database_uri) arbitrary_fields = WorkspaceArbitraryFields.from_orm(data) + try: + alembic_version = await get_version(data.database_uri) + except asyncio.TimeoutError: + alembic_version = "[bold red]ERROR: Timeout[/bold red]" + except Exception as e: + alembic_version = f"[bold red]ERROR: {e}[/bold red]" output = WorkSpaceArbitraryConsoleOutput( **arbitrary_fields.dict(), email=user.email_encrypted, @@ -243,7 +259,9 @@ async def ping(owner_email: str = typer.Argument(..., help="Workspace owner emai async with session_maker() as session: try: owner = await UsersCRUD(session).get_by_email(owner_email) - data = await WorkspaceService(session, owner.id).get_arbitrary_info_by_owner_id_if_use_arbitrary(owner.id) + data = await WorkspaceService(session, owner.id).get_arbitrary_info_by_owner_id_if_use_arbitrary( + owner.id, in_use_only=False + ) except (UserNotFound, UserIsDeletedError): error(f"User with email {owner_email} not found") except WorkspaceNotFoundError as e: @@ -256,9 +274,11 @@ async def ping(owner_email: str = typer.Argument(..., help="Workspace owner emai print("Check database availability.") await arb_session.execute("select current_date") 
print(f"[green]Database for user [bold]{owner_email}[/bold] is available.[/green]") + except asyncio.TimeoutError: + error_msg("Timeout error") except Exception as e: - error(str(e)) - print(f"Check bucket {data.storage_bucket} availability.") + error_msg(str(e)) + print(f'Check bucket "{data.storage_bucket}" availability.') storage = await select_storage(owner_id=owner.id, session=session) key = "mindlogger.txt" presigned_data = storage.generate_presigned_post(data.storage_bucket, key) @@ -272,8 +292,8 @@ async def ping(owner_email: str = typer.Argument(..., help="Workspace owner emai if response.status_code == http.HTTPStatus.NO_CONTENT: print(f"[green]Bucket {data.storage_bucket} for user {owner_email} is available.[/green]") else: - print(f"Can not upload test file to bucket {data.storage_bucket} for user {owner_email}") + error_msg("File upload error") print(response.content) except httpx.HTTPError as e: - print(f"Can not upload test file to bucket {data.storage_bucket} for user {owner_email}") + error_msg("File upload error") error(str(e)) diff --git a/src/apps/workspaces/domain/workspace.py b/src/apps/workspaces/domain/workspace.py index ba9ed9b81fc..e331a6dd493 100644 --- a/src/apps/workspaces/domain/workspace.py +++ b/src/apps/workspaces/domain/workspace.py @@ -309,6 +309,12 @@ class WorkSpaceArbitraryConsoleOutput(WorkspaceArbitraryFields): email: str alembic_version: str | None + @validator("use_arbitrary") + def format_arbitrary_usage(cls, value): + if value: + return "[green]True[/green]" + return "[red]False[/red]" + class WorkspaceArbitraryCreate(WorkspaceArbitraryFields): database_uri: str diff --git a/src/apps/workspaces/service/workspace.py b/src/apps/workspaces/service/workspace.py index 48926924868..25e78724d64 100644 --- a/src/apps/workspaces/service/workspace.py +++ b/src/apps/workspaces/service/workspace.py @@ -249,12 +249,14 @@ async def get_arbitrary_info_if_use_arbitrary(self, applet_id: uuid.UUID) -> Wor except ValidationError: return 
None - async def get_arbitrary_info_by_owner_id_if_use_arbitrary(self, owner_id: uuid.UUID) -> WorkspaceArbitrary | None: + async def get_arbitrary_info_by_owner_id_if_use_arbitrary( + self, owner_id: uuid.UUID, *, in_use_only=True + ) -> WorkspaceArbitrary | None: schema = await UserWorkspaceCRUD(self.session).get_by_user_id(owner_id) - if not schema or not schema.use_arbitrary or not schema.database_uri: + if not schema or (in_use_only and not schema.use_arbitrary) or not schema.database_uri: return None try: - return WorkspaceArbitrary.from_orm(schema) if schema else None + return WorkspaceArbitrary.from_orm(schema) except ValidationError: return None diff --git a/src/infrastructure/database/migrations/versions/2024_06_13_14_24-library_default_keywords.py b/src/infrastructure/database/migrations/versions/2024_06_13_14_24-library_default_keywords.py new file mode 100644 index 00000000000..fb6351f477f --- /dev/null +++ b/src/infrastructure/database/migrations/versions/2024_06_13_14_24-library_default_keywords.py @@ -0,0 +1,54 @@ +"""Library default keywords + +Revision ID: c587d336f28e +Revises: 01115b529336 +Create Date: 2024-06-13 14:24:39.035015 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "c587d336f28e" +down_revision = "01115b529336" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.execute("UPDATE library SET keywords = '{}'::varchar[] WHERE keywords IS NULL") + op.execute("UPDATE library SET search_keywords = '{}'::varchar[] WHERE search_keywords IS NULL") + op.alter_column( + "library", + "keywords", + existing_type=postgresql.ARRAY(sa.VARCHAR()), + nullable=False, + server_default='{}' + ) + op.alter_column( + "library", + "search_keywords", + existing_type=postgresql.ARRAY(sa.VARCHAR()), + nullable=False, + server_default='{}' + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "library", + "search_keywords", + existing_type=postgresql.ARRAY(sa.VARCHAR()), + nullable=True, + ) + op.alter_column( + "library", + "keywords", + existing_type=postgresql.ARRAY(sa.VARCHAR()), + nullable=True, + ) + # ### end Alembic commands ### diff --git a/src/infrastructure/database/migrations_arbitrary/env.py b/src/infrastructure/database/migrations_arbitrary/env.py index 85cf24804bb..d7722aee764 100644 --- a/src/infrastructure/database/migrations_arbitrary/env.py +++ b/src/infrastructure/database/migrations_arbitrary/env.py @@ -1,6 +1,7 @@ import asyncio import logging import os +import traceback import uuid from logging import getLogger from logging.config import fileConfig @@ -38,6 +39,7 @@ async def get_all_servers(connection): SELECT uw.database_uri, uw.user_id FROM users_workspaces as uw WHERE uw.database_uri is not null and uw.database_uri <> '' + ORDER BY uw.created_at """ ) rows = await connection.execute(query) @@ -84,9 +86,19 @@ async def migrate_arbitrary(): future=True, ) ) - async with connectable.connect() as connection: - await connection.run_sync(do_run_migrations, arbitrary_meta, config) - await connectable.dispose() + try: + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations, arbitrary_meta, config) + migration_log.info(f"Success: {owner_id} successfully migrated") + except 
asyncio.TimeoutError: + migration_log.error(f"!!! Error during migration of {owner_id}") + migration_log.error("Connection timeout") + except Exception as e: + migration_log.error(f"!!! Error during migration of {owner_id}") + migration_log.error(e) + traceback.print_exception(e) + finally: + await connectable.dispose() def run_migrations_offline() -> None: