[Issue #3527] Modify the logic around the opportunity change tracking table to never delete records #3565

Merged
Changes from all commits
Commits
34 commits
ae394d2
Add JobTable, track in tasks
mikehgrantsgov Jan 17, 2025
ea3d22f
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 17, 2025
98257e3
Update to enums / add metrics to table
mikehgrantsgov Jan 17, 2025
8fa48f5
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 17, 2025
71a80d9
Lint
mikehgrantsgov Jan 17, 2025
e65f8d7
Lint
mikehgrantsgov Jan 17, 2025
53d7b2e
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 17, 2025
ac89d27
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 17, 2025
4193c2b
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
4f72d43
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
238e8fd
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
43f90be
Remove last_loaded_at and use updated_at instead
mikehgrantsgov Jan 21, 2025
5c18c81
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 21, 2025
2865fbd
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 21, 2025
b5c8d05
Lint
mikehgrantsgov Jan 21, 2025
68030d4
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 21, 2025
54b5d10
Fix
mikehgrantsgov Jan 21, 2025
55922b8
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 21, 2025
92b21fb
Update migration
mikehgrantsgov Jan 21, 2025
4fe3095
Fix migration
mikehgrantsgov Jan 21, 2025
9efeb56
Update to JobLog, remove has_update
mikehgrantsgov Jan 23, 2025
578fa09
Format
mikehgrantsgov Jan 23, 2025
e62f4e6
Remove query on non-null column
mikehgrantsgov Jan 23, 2025
ad38093
Add task tests
mikehgrantsgov Jan 23, 2025
562c2a7
Catch db errors and rollback/start new transaction to store failed state
mikehgrantsgov Jan 23, 2025
624bd98
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 23, 2025
e57fc43
Fix head
mikehgrantsgov Jan 23, 2025
aa65d75
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 23, 2025
4342630
Fix migration
mikehgrantsgov Jan 23, 2025
9d51544
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 23, 2025
aa6f5b7
Update transaction management
mikehgrantsgov Jan 24, 2025
2761cdf
Fix test
mikehgrantsgov Jan 24, 2025
6c26a5d
Wrap failed update with db_session.begin
mikehgrantsgov Jan 24, 2025
562daf3
Use utcnow and now eastern, re-move timing block
mikehgrantsgov Jan 27, 2025
6 changes: 6 additions & 0 deletions api/src/constants/lookup_constants.py
@@ -130,3 +130,9 @@ class OpportunityAttachmentType(StrEnum):

class ExternalUserType(StrEnum):
LOGIN_GOV = "login_gov"


class JobStatus(StrEnum):
STARTED = "started"
COMPLETED = "completed"
FAILED = "failed"
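
As a quick aside (illustration only, not part of the diff): because JobStatus is a StrEnum, its members behave like their underlying string values, which is what lets the lookup layer further down map them to integer ids while application code keeps comparing readable strings:

from src.constants.lookup_constants import JobStatus

# StrEnum members are also str instances, so they compare and serialize
# as their raw string values.
assert JobStatus.STARTED == "started"
assert JobStatus("failed") is JobStatus.FAILED
assert f"status={JobStatus.COMPLETED}" == "status=completed"
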
@@ -0,0 +1,225 @@
"""Rename tables and create job table

Revision ID: dc04ce955a9a
Revises: 99bb8e01ad38
Create Date: 2025-01-16 18:34:48.013913

"""

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import text

# revision identifiers, used by Alembic.
revision = "dc04ce955a9a"
down_revision = "fe052c05c757"
branch_labels = None
depends_on = None


create_trigger_function = """
CREATE OR REPLACE FUNCTION update_opportunity_search_queue()
RETURNS TRIGGER AS $$
DECLARE
opp_id bigint;
BEGIN
-- Determine the opportunity_id based on the table
CASE TG_TABLE_NAME
WHEN 'link_opportunity_summary_funding_instrument' THEN
opp_id := (SELECT opportunity_id FROM api.opportunity_summary WHERE opportunity_summary_id = NEW.opportunity_summary_id);
WHEN 'link_opportunity_summary_funding_category' THEN
opp_id := (SELECT opportunity_id FROM api.opportunity_summary WHERE opportunity_summary_id = NEW.opportunity_summary_id);
WHEN 'link_opportunity_summary_applicant_type' THEN
opp_id := (SELECT opportunity_id FROM api.opportunity_summary WHERE opportunity_summary_id = NEW.opportunity_summary_id);
WHEN 'opportunity_summary' THEN
opp_id := NEW.opportunity_id;
WHEN 'current_opportunity_summary' THEN
opp_id := NEW.opportunity_id;
ELSE
opp_id := NEW.opportunity_id;
END CASE;

INSERT INTO api.opportunity_change_audit (opportunity_id)
VALUES (opp_id)
ON CONFLICT (opportunity_id)
DO UPDATE SET updated_at = CURRENT_TIMESTAMP;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;
"""


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"job_log",
sa.Column("job_id", sa.UUID(), nullable=False),
sa.Column("job_type", sa.Text(), nullable=False),
sa.Column(
"job_status",
sa.Text(),
nullable=False,
),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.PrimaryKeyConstraint("job_id", name=op.f("job_pkey")),
schema="api",
)
op.create_table(
"opportunity_change_audit",
sa.Column("opportunity_id", sa.BigInteger(), nullable=False),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["opportunity_id"],
["api.opportunity.opportunity_id"],
name=op.f("opportunity_change_audit_opportunity_id_opportunity_fkey"),
),
sa.PrimaryKeyConstraint("opportunity_id", name=op.f("opportunity_change_audit_pkey")),
schema="api",
)
op.create_index(
op.f("opportunity_change_audit_opportunity_id_idx"),
"opportunity_change_audit",
["opportunity_id"],
unique=False,
schema="api",
)

op.execute(create_trigger_function)

# Insert all existing opportunities into the audit table
op.execute(
text(
"""
INSERT INTO api.opportunity_change_audit (opportunity_id, created_at, updated_at)
SELECT
opportunity_id,
CURRENT_TIMESTAMP as created_at,
CURRENT_TIMESTAMP as updated_at
FROM api.opportunity
ON CONFLICT (opportunity_id) DO NOTHING
"""
)
)

op.drop_index(
"opportunity_search_index_queue_opportunity_id_idx",
table_name="opportunity_search_index_queue",
schema="api",
)
op.drop_table("opportunity_search_index_queue", schema="api")

op.create_table(
"lk_job_status",
sa.Column("job_status_id", sa.Integer(), nullable=False),
sa.Column("description", sa.Text(), nullable=False),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.PrimaryKeyConstraint("job_status_id", name=op.f("lk_job_status_pkey")),
schema="api",
)
op.add_column("job_log", sa.Column("job_status_id", sa.Integer(), nullable=False), schema="api")
op.add_column(
"job_log",
sa.Column("metrics", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
schema="api",
)
op.create_foreign_key(
op.f("job_log_job_status_id_lk_job_status_fkey"),
"job_log",
"lk_job_status",
["job_status_id"],
["job_status_id"],
source_schema="api",
referent_schema="api",
)
op.drop_column("job_log", "job_status", schema="api")
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"job_log",
sa.Column("job_status", sa.TEXT(), autoincrement=False, nullable=False),
schema="api",
)
op.drop_constraint(
op.f("job_job_status_id_lk_job_status_fkey"), "job_log", schema="api", type_="foreignkey"
)
op.drop_column("job_log", "metrics", schema="api")
op.drop_column("job_log", "job_status_id", schema="api")
op.drop_table("lk_job_status", schema="api")
op.create_table(
"opportunity_search_index_queue",
sa.Column("opportunity_id", sa.BIGINT(), autoincrement=False, nullable=False),
sa.Column(
"created_at",
postgresql.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
autoincrement=False,
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
autoincrement=False,
nullable=False,
),
sa.ForeignKeyConstraint(
["opportunity_id"],
["api.opportunity.opportunity_id"],
name="opportunity_search_index_queue_opportunity_id_opportunity_fkey",
),
sa.PrimaryKeyConstraint("opportunity_id", name="opportunity_search_index_queue_pkey"),
schema="api",
)
op.create_index(
"opportunity_search_index_queue_opportunity_id_idx",
"opportunity_search_index_queue",
["opportunity_id"],
unique=False,
schema="api",
)
op.drop_index(
op.f("opportunity_change_audit_opportunity_id_idx"),
table_name="opportunity_change_audit",
schema="api",
)
op.drop_table("opportunity_change_audit", schema="api")
op.drop_table("job_log", schema="api")
# ### end Alembic commands ###
11 changes: 10 additions & 1 deletion api/src/db/models/__init__.py
@@ -1,6 +1,14 @@
import logging

from . import agency_models, base, extract_models, lookup_models, opportunity_models, user_models
from . import (
agency_models,
base,
extract_models,
lookup_models,
opportunity_models,
task_models,
user_models,
)

logger = logging.getLogger(__name__)

@@ -15,4 +23,5 @@
"agency_models",
"user_models",
"extract_models",
"task_models",
]
21 changes: 21 additions & 0 deletions api/src/db/models/lookup_models.py
@@ -8,6 +8,7 @@
ExtractType,
FundingCategory,
FundingInstrument,
JobStatus,
OpportunityAttachmentType,
OpportunityCategory,
OpportunityStatus,
@@ -116,6 +117,14 @@
]
)

JOB_STATUS_CONFIG = LookupConfig(
[
LookupStr(JobStatus.STARTED, 1),
LookupStr(JobStatus.COMPLETED, 2),
LookupStr(JobStatus.FAILED, 3),
]
)

EXTERNAL_USER_TYPE_CONFIG = LookupConfig([LookupStr(ExternalUserType.LOGIN_GOV, 1)])

EXTRACT_TYPE_CONFIG = LookupConfig(
@@ -266,3 +275,15 @@ def from_lookup(cls, lookup: Lookup) -> "LkExtractType":
return LkExtractType(
extract_type_id=lookup.lookup_val, description=lookup.get_description()
)


@LookupRegistry.register_lookup(JOB_STATUS_CONFIG)
class LkJobStatus(LookupTable, TimestampMixin):
__tablename__ = "lk_job_status"

job_status_id: Mapped[int] = mapped_column(primary_key=True)
description: Mapped[str]

@classmethod
def from_lookup(cls, lookup: Lookup) -> "LkJobStatus":
return LkJobStatus(job_status_id=lookup.lookup_val, description=lookup.get_description())
6 changes: 3 additions & 3 deletions api/src/db/models/opportunity_models.py
@@ -69,7 +69,7 @@ def agency(self) -> str | None:
back_populates="opportunity", uselist=True, cascade="all, delete-orphan"
)

opportunity_search_index_queue: Mapped["OpportunitySearchIndexQueue | None"] = relationship(
opportunity_change_audit: Mapped["OpportunityChangeAudit | None"] = relationship(
back_populates="opportunity", single_parent=True, cascade="all, delete-orphan"
)

@@ -452,8 +452,8 @@ class OpportunityAttachment(ApiSchemaTable, TimestampMixin):
legacy_folder_id: Mapped[int | None] = mapped_column(BigInteger)


class OpportunitySearchIndexQueue(ApiSchemaTable, TimestampMixin):
__tablename__ = "opportunity_search_index_queue"
class OpportunityChangeAudit(ApiSchemaTable, TimestampMixin):
__tablename__ = "opportunity_change_audit"

opportunity_id: Mapped[int] = mapped_column(
BigInteger, ForeignKey(Opportunity.opportunity_id), primary_key=True, index=True
22 changes: 22 additions & 0 deletions api/src/db/models/task_models.py
@@ -0,0 +1,22 @@
import uuid

from sqlalchemy import ForeignKey
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.orm import Mapped, mapped_column

from src.adapters.db.type_decorators.postgres_type_decorators import LookupColumn
from src.db.models.base import ApiSchemaTable, TimestampMixin
from src.db.models.lookup_models import JobStatus, LkJobStatus


class JobLog(ApiSchemaTable, TimestampMixin):
__tablename__ = "job_log"

job_id: Mapped[uuid.UUID] = mapped_column(UUID, primary_key=True, default=uuid.uuid4)
job_type: Mapped[str]
job_status: Mapped[JobStatus] = mapped_column(
"job_status_id",
LookupColumn(LkJobStatus),
ForeignKey(LkJobStatus.job_status_id),
)
metrics: Mapped[dict | None] = mapped_column(JSONB)
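
Finally, a minimal sketch (assumptions throughout, not the PR's actual Task plumbing) of how a task could record its lifecycle in JobLog, in the spirit of the commit "Catch db errors and rollback/start new transaction to store failed state"; the helper name, session handling, and metrics shape below are hypothetical:

from sqlalchemy.orm import Session

from src.constants.lookup_constants import JobStatus
from src.db.models.task_models import JobLog


def run_with_job_log(db_session: Session, job_type: str, work) -> None:
    # Record that the job started before any work happens.
    job = JobLog(job_type=job_type, job_status=JobStatus.STARTED)
    db_session.add(job)
    db_session.commit()

    try:
        metrics = work()  # e.g. returns {"records_processed": 123}
        job.job_status = JobStatus.COMPLETED
        job.metrics = metrics
        db_session.commit()
    except Exception:
        # Roll back the failed work, then persist the FAILED status in a new
        # transaction so the failure is still visible in job_log.
        db_session.rollback()
        job.job_status = JobStatus.FAILED
        db_session.commit()
        raise

The rollback-then-commit pattern mirrors what the commits above describe: the job's own changes are discarded, but the failure record survives.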