From 97e41c5f9eb7c7e197280c5fd7acffc2af26c9de Mon Sep 17 00:00:00 2001
From: Bobby Novak <176936850+rnovak338@users.noreply.github.com>
Date: Thu, 19 Sep 2024 13:33:18 -0400
Subject: [PATCH] Django FSM migration to Viewflow

- Added new `django-viewflow` and `django-filter` dependencies to
  `requirements.txt`.
- New file `viewflow.py` under `/audit/models/`, which contains the FSM logic
  for transitioning an SAC.
- Moved the `STATUS` enumeration outside of the `SingleAuditChecklist` class.
  This required import/reference changes across many files and tests.
- Removed references to the old, deprecated `django-fsm` library.
- New migration to handle changing the SAC's `submission_status` field so it
  no longer depends on the deprecated `django-fsm`.
---
 backend/audit/admin.py                        |  42 +-
 backend/audit/intake_to_dissemination.py      |   2 +-
 .../0013_singleauditchecklistflow_and_more.py |  47 ++
 backend/audit/models/models.py                | 171 ++-----
 backend/audit/models/viewflow.py              | 199 ++++++++
 backend/audit/test_admin.py                   |  23 +-
 backend/audit/test_intake_to_dissemination.py |  19 +-
 backend/audit/test_models.py                  |  34 +-
 backend/audit/test_views.py                   |   3 +-
 .../audit/views/submission_progress_view.py   |  22 +-
 .../audit/views/unlock_after_certification.py |  17 +-
 backend/audit/views/views.py                  |  69 +--
 .../end_to_end_core.py                        |  16 +-
 ...nd_regenerate_dissemination_from_intake.py |   5 +-
 .../remove_workbook_artifacts.py              | 430 +++++++++---------
 .../test_remove_workbook_artifacts.py         | 310 ++++++-------
 backend/requirements.txt                      |  12 +
 backend/requirements/requirements.in          |   4 +-
 .../commands/check_cog_over_for_year.py       |   3 +-
 19 files changed, 776 insertions(+), 652 deletions(-)
 create mode 100644 backend/audit/migrations/0013_singleauditchecklistflow_and_more.py
 create mode 100644 backend/audit/models/viewflow.py

diff --git a/backend/audit/admin.py b/backend/audit/admin.py
index 049488679..48c691f58 100644
--- a/backend/audit/admin.py
+++ b/backend/audit/admin.py
@@ -12,6 +12,8 @@
     SacValidationWaiver,
     UeiValidationWaiver,
 )
+from audit.models.models import STATUS
+from audit.models.viewflow import sac_transition
 from audit.validators import (
     validate_auditee_certification_json,
     validate_auditor_certification_json,
@@ -154,8 +156,8 @@ def save_model(self, request, obj, form, change):
         try:
             sac = SingleAuditChecklist.objects.get(report_id=obj.report_id_id)
             if sac.submission_status in [
-                SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION,
-                SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED,
+                STATUS.READY_FOR_CERTIFICATION,
+                STATUS.AUDITOR_CERTIFIED,
             ]:
                 logger.info(
                     f"User {request.user.email} is applying waiver for SAC with status: {sac.submission_status}"
                 )
@@ -167,7 +169,7 @@ def save_model(self, request, obj, form, change):
                     f"SAC {sac.report_id} updated successfully with waiver by user: {request.user.email}."
                 )
             elif (
-                SingleAuditChecklist.STATUS.IN_PROGRESS
+                STATUS.IN_PROGRESS
                 and SacValidationWaiver.TYPES.FINDING_REFERENCE_NUMBER
                 in obj.waiver_types
             ):
@@ -182,7 +184,7 @@ def save_model(self, request, obj, form, change):
             messages.set_level(request, messages.WARNING)
             messages.warning(
                 request,
-                f"Cannot apply waiver to SAC with status {sac.submission_status}. Expected status to be one of {SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION}, {SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED}, or {SingleAuditChecklist.STATUS.IN_PROGRESS}.",
+                f"Cannot apply waiver to SAC with status {sac.submission_status}. 
Expected status to be one of {STATUS.READY_FOR_CERTIFICATION}, {STATUS.AUDITOR_CERTIFIED}, or {STATUS.IN_PROGRESS}.", ) logger.warning( f"User {request.user.email} attempted to apply waiver to SAC with invalid status: {sac.submission_status}" @@ -218,20 +220,13 @@ def handle_auditor_certification(self, request, obj, sac): }, } ) - if ( - sac.submission_status - == SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION - ): + if sac.submission_status == STATUS.READY_FOR_CERTIFICATION: validated = validate_auditor_certification_json(auditor_certification) sac.auditor_certification = validated - sac.transition_to_auditor_certified() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.AUDITOR_CERTIFICATION_COMPLETED, - ) - logger.info( - f"Auditor certification completed for SAC {sac.report_id} by user: {request.user.email}." - ) + if sac_transition(request, sac, transition_to=STATUS.AUDITOR_CERTIFIED): + logger.info( + f"Auditor certification completed for SAC {sac.report_id} by user: {request.user.email}." + ) def handle_auditee_certification(self, request, obj, sac): if SacValidationWaiver.TYPES.AUDITEE_CERTIFYING_OFFICIAL in obj.waiver_types: @@ -257,17 +252,14 @@ def handle_auditee_certification(self, request, obj, sac): }, } ) - if sac.submission_status == SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED: + if sac.submission_status == STATUS.AUDITOR_CERTIFIED: validated = validate_auditee_certification_json(auditee_certification) sac.auditee_certification = validated - sac.transition_to_auditee_certified() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.AUDITEE_CERTIFICATION_COMPLETED, - ) - logger.info( - f"Auditee certification completed for SAC {sac.report_id} by user: {request.user.email}." - ) + + if sac_transition(request, sac, transition_to=STATUS.AUDITEE_CERTIFIED): + logger.info( + f"Auditee certification completed for SAC {sac.report_id} by user: {request.user.email}." 
+ ) class UeiValidationWaiverAdmin(admin.ModelAdmin): diff --git a/backend/audit/intake_to_dissemination.py b/backend/audit/intake_to_dissemination.py index 6f8fde46b..4ebf5b781 100644 --- a/backend/audit/intake_to_dissemination.py +++ b/backend/audit/intake_to_dissemination.py @@ -284,7 +284,7 @@ def load_general(self): oversight_agency = self.single_audit_checklist.oversight_agency dates_by_status = self._get_dates_from_sac() - status = self.single_audit_checklist.STATUS + status = self.single_audit_checklist.get_statuses() ready_for_certification_date = dates_by_status[status.READY_FOR_CERTIFICATION] if self.mode == IntakeToDissemination.DISSEMINATION: submitted_date = self._convert_utc_to_american_samoa_zone( diff --git a/backend/audit/migrations/0013_singleauditchecklistflow_and_more.py b/backend/audit/migrations/0013_singleauditchecklistflow_and_more.py new file mode 100644 index 000000000..f5cc6b61b --- /dev/null +++ b/backend/audit/migrations/0013_singleauditchecklistflow_and_more.py @@ -0,0 +1,47 @@ +# Generated by Django 5.1 on 2024-09-18 18:44 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("audit", "0012_alter_sacvalidationwaiver_waiver_types"), + ] + + operations = [ + migrations.CreateModel( + name="SingleAuditChecklistFlow", + fields=[ + ( + "singleauditchecklist_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="audit.singleauditchecklist", + ), + ), + ], + bases=("audit.singleauditchecklist",), + ), + migrations.AlterField( + model_name="singleauditchecklist", + name="submission_status", + field=models.CharField( + choices=[ + ("in_progress", "In Progress"), + ("ready_for_certification", "Ready for Certification"), + ("auditor_certified", "Auditor Certified"), + ("auditee_certified", "Auditee Certified"), + ("certified", "Certified"), + ("submitted", "Submitted"), + ("disseminated", "Disseminated"), + ], + default="in_progress", + ), + ), + ] diff --git a/backend/audit/models/models.py b/backend/audit/models/models.py index e74829893..571e2c1d2 100644 --- a/backend/audit/models/models.py +++ b/backend/audit/models/models.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone, timedelta +from datetime import timedelta from itertools import chain import json import logging @@ -13,8 +13,6 @@ from django.utils.translation import gettext_lazy as _ from django.utils import timezone as django_timezone -from django_fsm import FSMField, transition - import audit.cross_validation from audit.cross_validation.naming import SECTION_NAMES from audit.intake_to_dissemination import IntakeToDissemination @@ -161,6 +159,20 @@ class LateChangeError(Exception): """ +class STATUS: + """ + The possible states of a submission. + """ + + IN_PROGRESS = "in_progress" + READY_FOR_CERTIFICATION = "ready_for_certification" + AUDITOR_CERTIFIED = "auditor_certified" + AUDITEE_CERTIFIED = "auditee_certified" + CERTIFIED = "certified" + SUBMITTED = "submitted" + DISSEMINATED = "disseminated" + + class SingleAuditChecklist(models.Model, GeneralInformationMixin): # type: ignore """ Monolithic Single Audit Checklist. @@ -185,7 +197,7 @@ def save(self, *args, **kwargs): in progress isn't being altered; skip this if we know this submission is in progress. 
""" - if self.submission_status != self.STATUS.IN_PROGRESS: + if self.submission_status != STATUS.IN_PROGRESS: try: self._reject_late_changes() except LateChangeError as err: @@ -278,18 +290,11 @@ def get_friendly_status(self) -> str: """Return the friendly version of submission_status.""" return dict(self.STATUS_CHOICES)[self.submission_status] - # Constants: - class STATUS: - """The states that a submission can be in.""" - - IN_PROGRESS = "in_progress" - READY_FOR_CERTIFICATION = "ready_for_certification" - AUDITOR_CERTIFIED = "auditor_certified" - AUDITEE_CERTIFIED = "auditee_certified" - CERTIFIED = "certified" - SUBMITTED = "submitted" - DISSEMINATED = "disseminated" + def get_statuses(self) -> type[STATUS]: + """Return all possible statuses.""" + return STATUS + # Constants: STATUS_CHOICES = ( (STATUS.IN_PROGRESS, "In Progress"), (STATUS.READY_FOR_CERTIFICATION, "Ready for Certification"), @@ -324,7 +329,9 @@ class STATUS: # 0. Meta data submitted_by = models.ForeignKey(User, on_delete=models.PROTECT) date_created = models.DateTimeField(auto_now_add=True) - submission_status = FSMField(default=STATUS.IN_PROGRESS, choices=STATUS_CHOICES) + submission_status = models.CharField( + default=STATUS.IN_PROGRESS, choices=STATUS_CHOICES + ) data_source = models.CharField(default="GSAFAC") # implement an array of tuples as two arrays since we can only have simple fields inside an array @@ -502,138 +509,26 @@ def validate_individually(self): return result - @transition( - field="submission_status", - source=STATUS.IN_PROGRESS, - target=STATUS.READY_FOR_CERTIFICATION, - ) - def transition_to_ready_for_certification(self): - """ - The permission checks verifying that the user attempting to do this has - the appropriate privileges will be done at the view level. - """ - self.transition_name.append(SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=[ - STATUS.READY_FOR_CERTIFICATION, - STATUS.AUDITOR_CERTIFIED, - STATUS.AUDITEE_CERTIFIED, - ], - target=STATUS.IN_PROGRESS, - ) - def transition_to_in_progress_again(self): - """ - The permission checks verifying that the user attempting to do this has - the appropriate privileges will be done at the view level. - """ - - # null out any existing certifications on this submission - self.auditor_certification = None - self.auditee_certification = None - - self.transition_name.append(SingleAuditChecklist.STATUS.IN_PROGRESS) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=STATUS.READY_FOR_CERTIFICATION, - target=STATUS.AUDITOR_CERTIFIED, - ) - def transition_to_auditor_certified(self): - """ - The permission checks verifying that the user attempting to do this has - the appropriate privileges will be done at the view level. - """ - self.transition_name.append(SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=STATUS.AUDITOR_CERTIFIED, - target=STATUS.AUDITEE_CERTIFIED, - ) - def transition_to_auditee_certified(self): - """ - The permission checks verifying that the user attempting to do this has - the appropriate privileges will be done at the view level. 
- """ - self.transition_name.append(SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=STATUS.AUDITEE_CERTIFIED, - target=STATUS.SUBMITTED, - ) - def transition_to_submitted(self): - """ - The permission checks verifying that the user attempting to do this has - the appropriate privileges will be done at the view level. - """ - - self.transition_name.append(SingleAuditChecklist.STATUS.SUBMITTED) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=STATUS.SUBMITTED, - target=STATUS.DISSEMINATED, - ) - def transition_to_disseminated(self): - logger.info("Transitioning to DISSEMINATED") - self.transition_name.append(SingleAuditChecklist.STATUS.DISSEMINATED) - self.transition_date.append(datetime.now(timezone.utc)) - - @transition( - field="submission_status", - source=[ - STATUS.READY_FOR_CERTIFICATION, - STATUS.AUDITOR_CERTIFIED, - STATUS.AUDITEE_CERTIFIED, - STATUS.CERTIFIED, - ], - target=STATUS.SUBMITTED, - ) - def transition_to_in_progress(self): - """ - Any edit to a submission in the following states should result in it - moving back to STATUS.IN_PROGRESS: - - + STATUS.READY_FOR_CERTIFICATION - + STATUS.AUDITOR_CERTIFIED - + STATUS.AUDITEE_CERTIFIED - + STATUS.CERTIFIED - - For the moment we're not trying anything fancy like catching changes at - the model level, and will again leave it up to the views to track that - changes have been made at that point. - """ - self.transition_name.append(SingleAuditChecklist.STATUS.SUBMITTED) - self.transition_date.append(datetime.now(timezone.utc)) - @property def is_auditee_certified(self): return self.submission_status in [ - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, - SingleAuditChecklist.STATUS.CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.AUDITEE_CERTIFIED, + STATUS.CERTIFIED, + STATUS.SUBMITTED, ] @property def is_auditor_certified(self): return self.submission_status in [ - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.AUDITEE_CERTIFIED, + STATUS.AUDITOR_CERTIFIED, + STATUS.CERTIFIED, + STATUS.SUBMITTED, ] @property def is_submitted(self): - return self.submission_status in [SingleAuditChecklist.STATUS.SUBMITTED] + return self.submission_status in [STATUS.SUBMITTED] def get_transition_date(self, status): index = self.transition_name.index(status) @@ -663,7 +558,7 @@ class ExcelFile(models.Model): date_created = models.DateTimeField(auto_now_add=True) def save(self, *args, **kwargs): - if self.sac.submission_status != SingleAuditChecklist.STATUS.IN_PROGRESS: + if self.sac.submission_status != STATUS.IN_PROGRESS: raise LateChangeError("Attemtped Excel file upload") self.filename = f"{self.sac.report_id}--{self.form_section}.xlsx" @@ -710,7 +605,7 @@ class SingleAuditReportFile(models.Model): def save(self, *args, **kwargs): report_id = SingleAuditChecklist.objects.get(id=self.sac.id).report_id self.filename = f"{report_id}.pdf" - if self.sac.submission_status != self.sac.STATUS.IN_PROGRESS: + if self.sac.submission_status != STATUS.IN_PROGRESS: raise LateChangeError("Attempted PDF upload") event_user = kwargs.pop("event_user", None) diff --git a/backend/audit/models/viewflow.py b/backend/audit/models/viewflow.py new file mode 100644 index 000000000..cd16454dd --- /dev/null +++ b/backend/audit/models/viewflow.py @@ -0,0 
+1,199 @@
+from audit.models import SingleAuditChecklist, SubmissionEvent
+from audit.models.models import STATUS
+import datetime
+import logging
+import viewflow.fsm
+
+logger = logging.getLogger(__name__)
+
+
+def sac_transition(request, sac, **kwargs):
+    """
+    Transitions the submission_state for a SingleAuditChecklist (sac).
+    """
+
+    user = None
+    flow = SingleAuditChecklistFlow(sac)
+    target = kwargs.get("transition_to", None)
+
+    # optional - only needed when a user is involved.
+    if request:
+        user = request.user
+
+    # SAC must transition to a target state.
+    if target is None:
+        return False
+
+    if target == STATUS.IN_PROGRESS:
+        flow.transition_to_in_progress_again()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.UNLOCKED_AFTER_CERTIFICATION,
+        )
+        return True
+
+    elif target == STATUS.READY_FOR_CERTIFICATION:
+        flow.transition_to_ready_for_certification()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.LOCKED_FOR_CERTIFICATION,
+        )
+        return True
+
+    elif target == STATUS.AUDITEE_CERTIFIED:
+        flow.transition_to_auditee_certified()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.AUDITEE_CERTIFICATION_COMPLETED,
+        )
+        return True
+
+    elif target == STATUS.AUDITOR_CERTIFIED:
+        flow.transition_to_auditor_certified()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.AUDITOR_CERTIFICATION_COMPLETED,
+        )
+        return True
+
+    elif target == STATUS.SUBMITTED:
+        flow.transition_to_submitted()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.SUBMITTED,
+        )
+        return True
+
+    elif target == STATUS.DISSEMINATED:
+        flow.transition_to_disseminated()
+        sac.save(
+            event_user=user,
+            event_type=SubmissionEvent.EventType.DISSEMINATED,
+        )
+        return True
+
+    return False
+
+
+class SingleAuditChecklistFlow(SingleAuditChecklist):
+    """
+    Handles transitioning of states for an SAC.
+    """
+
+    state = viewflow.fsm.State(STATUS, default=STATUS.IN_PROGRESS)
+
+    def __init__(self, sac):
+        self.sac = sac
+
+    @state.setter()
+    def _set_sac_state(self, value):
+        self.sac.submission_status = value
+
+    @state.getter()
+    def _get_sac_state(self):
+        return self.sac.submission_status
+
+    @state.transition(
+        source=STATUS.IN_PROGRESS,
+        target=STATUS.READY_FOR_CERTIFICATION,
+    )
+    def transition_to_ready_for_certification(self):
+        """
+        The permission checks verifying that the user attempting to do this has
+        the appropriate privileges will be done at the view level.
+        """
+        self.sac.transition_name.append(STATUS.READY_FOR_CERTIFICATION)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=[
+            STATUS.READY_FOR_CERTIFICATION,
+            STATUS.AUDITOR_CERTIFIED,
+            STATUS.AUDITEE_CERTIFIED,
+        ],
+        target=STATUS.IN_PROGRESS,
+    )
+    def transition_to_in_progress_again(self):
+        """
+        The permission checks verifying that the user attempting to do this has
+        the appropriate privileges will be done at the view level.
+        """
+
+        # null out any existing certifications on this submission
+        self.sac.auditor_certification = None
+        self.sac.auditee_certification = None
+
+        self.sac.transition_name.append(STATUS.IN_PROGRESS)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=STATUS.READY_FOR_CERTIFICATION,
+        target=STATUS.AUDITOR_CERTIFIED,
+    )
+    def transition_to_auditor_certified(self):
+        """
+        The permission checks verifying that the user attempting to do this has
+        the appropriate privileges will be done at the view level.
+        """
+        self.sac.transition_name.append(STATUS.AUDITOR_CERTIFIED)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=STATUS.AUDITOR_CERTIFIED,
+        target=STATUS.AUDITEE_CERTIFIED,
+    )
+    def transition_to_auditee_certified(self):
+        """
+        The permission checks verifying that the user attempting to do this has
+        the appropriate privileges will be done at the view level.
+        """
+        self.sac.transition_name.append(STATUS.AUDITEE_CERTIFIED)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=STATUS.AUDITEE_CERTIFIED,
+        target=STATUS.SUBMITTED,
+    )
+    def transition_to_submitted(self):
+        """
+        The permission checks verifying that the user attempting to do this has
+        the appropriate privileges will be done at the view level.
+        """
+
+        self.sac.transition_name.append(STATUS.SUBMITTED)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=STATUS.SUBMITTED,
+        target=STATUS.DISSEMINATED,
+    )
+    def transition_to_disseminated(self):
+        logger.info("Transitioning to DISSEMINATED")
+        self.sac.transition_name.append(STATUS.DISSEMINATED)
+        self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc))
+
+    @state.transition(
+        source=[
+            STATUS.READY_FOR_CERTIFICATION,
+            STATUS.AUDITOR_CERTIFIED,
+            STATUS.AUDITEE_CERTIFIED,
+            STATUS.CERTIFIED,
+        ],
+        target=STATUS.SUBMITTED,
+    )
+    def transition_to_in_progress(self):
+        """
+        Any edit to a submission in the following states should result in it
+        moving back to STATUS.IN_PROGRESS:
+
+        + STATUS.READY_FOR_CERTIFICATION
+        + STATUS.AUDITOR_CERTIFIED
+        + STATUS.AUDITEE_CERTIFIED
+        + STATUS.CERTIFIED
+
+        For the moment we're not trying anything fancy like catching changes at
+        the model level, and will again leave it up to the views to track that
+        changes have been made at that point.
+ """ + self.sac.transition_name.append(STATUS.SUBMITTED) + self.sac.transition_date.append(datetime.datetime.now(datetime.timezone.utc)) diff --git a/backend/audit/test_admin.py b/backend/audit/test_admin.py index 43a36cd7a..10c3f4b78 100644 --- a/backend/audit/test_admin.py +++ b/backend/audit/test_admin.py @@ -3,6 +3,7 @@ from django.contrib.auth.models import User from django.contrib.messages.storage.fallback import FallbackStorage from .models import SacValidationWaiver, SingleAuditChecklist +from .models.models import STATUS from .admin import SacValidationWaiverAdmin from django.utils import timezone from model_bakery import baker @@ -27,7 +28,7 @@ def setUp(self): # Create a SingleAuditChecklist instance self.sac = baker.make( SingleAuditChecklist, - submission_status=SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, + submission_status=STATUS.READY_FOR_CERTIFICATION, ) # Create a request object @@ -72,14 +73,12 @@ def test_save_model_auditor_certification(self): # Checking results self.sac.refresh_from_db() - self.assertEqual( - self.sac.submission_status, SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED - ) + self.assertEqual(self.sac.submission_status, STATUS.AUDITOR_CERTIFIED) def test_save_model_auditee_certification(self): """Test the save_model method of the SacValidationWaiverAdmin class when the waiver is for auditee certification""" # Set the SAC status to AUDITOR_CERTIFIED - self.sac.submission_status = SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED + self.sac.submission_status = STATUS.AUDITOR_CERTIFIED self.sac.save() # Create a SacValidationWaiver instance @@ -100,9 +99,7 @@ def test_save_model_auditee_certification(self): # Checking results self.sac.refresh_from_db() - self.assertEqual( - self.sac.submission_status, SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED - ) + self.assertEqual(self.sac.submission_status, STATUS.AUDITEE_CERTIFIED) def test_save_model_invalid_status(self): # Set SAC status to an invalid one @@ -143,15 +140,13 @@ def test_handle_auditor_certification(self): # Checking results self.sac.refresh_from_db() - self.assertEqual( - self.sac.submission_status, SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED - ) + self.assertEqual(self.sac.submission_status, STATUS.AUDITOR_CERTIFIED) def test_handle_auditee_certification(self): """Test the handle_auditee_certification method of the SacValidationWaiverAdmin class.""" # Set SAC status to AUDITOR_CERTIFIED - self.sac.submission_status = SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED + self.sac.submission_status = STATUS.AUDITOR_CERTIFIED self.sac.save() # Simulating auditee certification @@ -165,6 +160,4 @@ def test_handle_auditee_certification(self): # Checking results self.sac.refresh_from_db() - self.assertEqual( - self.sac.submission_status, SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED - ) + self.assertEqual(self.sac.submission_status, STATUS.AUDITEE_CERTIFIED) diff --git a/backend/audit/test_intake_to_dissemination.py b/backend/audit/test_intake_to_dissemination.py index 55f685c98..06c42bad0 100644 --- a/backend/audit/test_intake_to_dissemination.py +++ b/backend/audit/test_intake_to_dissemination.py @@ -7,6 +7,7 @@ from faker import Faker from audit.models import SingleAuditChecklist, User +from audit.models.models import STATUS from audit.intake_to_dissemination import IntakeToDissemination from audit.test_views import AUDIT_JSON_FIXTURES, _load_json from audit.utils import Util @@ -26,10 +27,10 @@ def _set_transitions_hour(sac, hour): statuses = [ - SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, - 
SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.READY_FOR_CERTIFICATION, + STATUS.AUDITOR_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, + STATUS.SUBMITTED, ] # Get the current time in UTC current = datetime.now(timezone.utc).date() @@ -59,10 +60,10 @@ def __init__(self, methodName: str = "runTest") -> None: def _run_state_transition(self, sac): statuses = [ - SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.READY_FOR_CERTIFICATION, + STATUS.AUDITOR_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, + STATUS.SUBMITTED, ] # Get the current time in UTC transition_date = datetime.now(timezone.utc) @@ -460,7 +461,7 @@ def test_submitted_date(self): general = generals.first() # Get the sac submitted date - subdate = self.sac.get_transition_date(self.sac.STATUS.SUBMITTED) + subdate = self.sac.get_transition_date(STATUS.SUBMITTED) # Calculate the date at UTC-11 (the American Samoa timezone does not do DST) date_in_american_samoa = (subdate - timedelta(hours=11)).date() diff --git a/backend/audit/test_models.py b/backend/audit/test_models.py index f662ebe11..30d0e2c00 100644 --- a/backend/audit/test_models.py +++ b/backend/audit/test_models.py @@ -4,7 +4,7 @@ from django.db.utils import IntegrityError from django.test import TestCase -from django_fsm import TransitionNotAllowed +from viewflow.fsm import TransitionNotAllowed from model_bakery import baker from .models import ( @@ -17,6 +17,8 @@ User, generate_sac_report_id, ) +from .models.models import STATUS +from .models.viewflow import sac_transition, SingleAuditChecklistFlow class SingleAuditChecklistTests(TestCase): @@ -69,29 +71,29 @@ def test_submission_status_transitions(self): """ cases = ( ( - [SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION], - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, + [STATUS.READY_FOR_CERTIFICATION], + STATUS.AUDITOR_CERTIFIED, "transition_to_auditor_certified", ), ( - [SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED], - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, + [STATUS.AUDITOR_CERTIFIED], + STATUS.AUDITEE_CERTIFIED, "transition_to_auditee_certified", ), ( [ - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, ], - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.SUBMITTED, "transition_to_submitted", ), ( [ - SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, + STATUS.READY_FOR_CERTIFICATION, + STATUS.AUDITOR_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, ], - SingleAuditChecklist.STATUS.IN_PROGRESS, + STATUS.IN_PROGRESS, "transition_to_in_progress_again", ), ) @@ -101,8 +103,10 @@ def test_submission_status_transitions(self): for status_from in statuses_from: sac = baker.make(SingleAuditChecklist, submission_status=status_from) - transition_method = getattr(sac, transition_name) - transition_method() + transition_method = getattr( + SingleAuditChecklistFlow(sac), transition_name + ) + sac_transition(None, sac, transition_to=status_to) self.assertEqual(sac.submission_status, status_to) self.assertGreaterEqual(sac.get_transition_date(status_to), now) @@ -127,7 +131,7 @@ def test_no_late_changes(self): bad_statuses = [ status[0] for status in SingleAuditChecklist.STATUS_CHOICES - if status[0] != SingleAuditChecklist.STATUS.IN_PROGRESS + if status[0] != 
STATUS.IN_PROGRESS ] for status_from in bad_statuses: @@ -283,7 +287,7 @@ def test_no_late_upload(self): bad_statuses = [ status[0] for status in SingleAuditChecklist.STATUS_CHOICES - if status[0] != SingleAuditChecklist.STATUS.IN_PROGRESS + if status[0] != STATUS.IN_PROGRESS ] for status_from in bad_statuses: diff --git a/backend/audit/test_views.py b/backend/audit/test_views.py index 3e060dcd3..788033545 100644 --- a/backend/audit/test_views.py +++ b/backend/audit/test_views.py @@ -46,6 +46,7 @@ SubmissionEvent, generate_sac_report_id, ) +from audit.models.models import STATUS from audit.cross_validation.naming import SECTION_NAMES as SN from audit.views import MySubmissions from dissemination.models import FederalAward, General @@ -55,7 +56,7 @@ ACCESS_AND_SUBMISSION_PATH = reverse("report_submission:accessandsubmission") AUDIT_JSON_FIXTURES = Path(__file__).parent / "fixtures" / "json" EDIT_PATH = "audit:EditSubmission" -STATUSES = SingleAuditChecklist.STATUS +STATUSES = STATUS SUBMISSIONS_PATH = reverse("audit:MySubmissions") VALID_ELIGIBILITY_DATA = { diff --git a/backend/audit/views/submission_progress_view.py b/backend/audit/views/submission_progress_view.py index aab878927..da69605c5 100644 --- a/backend/audit/views/submission_progress_view.py +++ b/backend/audit/views/submission_progress_view.py @@ -10,6 +10,7 @@ SingleAuditChecklistAccessRequiredMixin, ) from audit.models import SingleAuditChecklist, SingleAuditReportFile, Access +from audit.models.models import STATUS # Turn the named tuples into dicts because Django templates work with dicts: @@ -137,11 +138,11 @@ def get(self, request, *args, **kwargs): "pre_submission_validation": { "completed": sac.submission_status in [ - SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, - SingleAuditChecklist.STATUS.SUBMITTED, - SingleAuditChecklist.STATUS.DISSEMINATED, + STATUS.READY_FOR_CERTIFICATION, + STATUS.AUDITOR_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, + STATUS.SUBMITTED, + STATUS.DISSEMINATED, ], "completed_date": None, "completed_by": None, @@ -151,21 +152,20 @@ def get(self, request, *args, **kwargs): "certification": { "auditor_certified": bool(sac.auditor_certification), "auditor_enabled": sac.submission_status - == SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, + == STATUS.READY_FOR_CERTIFICATION, "auditee_certified": bool(sac.auditee_certification), "auditee_enabled": sac.submission_status - == SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, + == STATUS.AUDITOR_CERTIFIED, }, "submission": { "completed": sac.submission_status in [ - SingleAuditChecklist.STATUS.SUBMITTED, - SingleAuditChecklist.STATUS.DISSEMINATED, + STATUS.SUBMITTED, + STATUS.DISSEMINATED, ], "completed_date": None, "completed_by": None, - "enabled": sac.submission_status - == SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, + "enabled": sac.submission_status == STATUS.AUDITEE_CERTIFIED, }, "report_id": report_id, "auditee_name": sac.auditee_name, diff --git a/backend/audit/views/unlock_after_certification.py b/backend/audit/views/unlock_after_certification.py index 319b3107e..004849985 100644 --- a/backend/audit/views/unlock_after_certification.py +++ b/backend/audit/views/unlock_after_certification.py @@ -10,8 +10,9 @@ ) from audit.models import ( SingleAuditChecklist, - SubmissionEvent, ) +from audit.models.models import STATUS +from audit.models.viewflow import sac_transition logger = logging.getLogger(__name__) @@ -30,9 +31,9 @@ def get(self, request, 
*args, **kwargs): try: sac = SingleAuditChecklist.objects.get(report_id=report_id) target_statuses = [ - SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION, - SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED, - SingleAuditChecklist.STATUS.AUDITEE_CERTIFIED, + STATUS.READY_FOR_CERTIFICATION, + STATUS.AUDITOR_CERTIFIED, + STATUS.AUDITEE_CERTIFIED, ] context = { "auditee_uei": sac.auditee_uei, @@ -61,12 +62,8 @@ def post(self, request, *args, **kwargs): form.data.get("unlock_after_certification") in acceptable ) if form.is_valid() and should_go_to_in_progress: - sac.transition_to_in_progress_again() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.UNLOCKED_AFTER_CERTIFICATION, - ) - logger.info("Submission unlocked after certification") + if sac_transition(request, sac, transition_to=STATUS.IN_PROGRESS): + logger.info("Submission unlocked after certification") return redirect(reverse("audit:SubmissionProgress", args=[report_id])) diff --git a/backend/audit/views/views.py b/backend/audit/views/views.py index 7820c8b52..18cacf23f 100644 --- a/backend/audit/views/views.py +++ b/backend/audit/views/views.py @@ -35,6 +35,8 @@ SingleAuditReportFile, SubmissionEvent, ) +from audit.models.models import STATUS +from audit.models.viewflow import sac_transition from audit.intakelib.exceptions import ExcelExtractionError from audit.validators import ( validate_auditee_certification_json, @@ -326,10 +328,8 @@ def post(self, request, *args, **kwargs): errors = sac.validate_full() if not errors: - sac.transition_to_ready_for_certification() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.LOCKED_FOR_CERTIFICATION, + sac_transition( + request, sac, transition_to=STATUS.READY_FOR_CERTIFICATION ) return redirect(reverse("audit:SubmissionProgress", args=[report_id])) else: @@ -365,10 +365,7 @@ def get(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if ( - sac.submission_status - != SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION - ): + if sac.submission_status != STATUS.READY_FOR_CERTIFICATION: return redirect(f"/audit/submission-progress/{sac.report_id}") return render(request, "audit/auditor-certification-step-1.html", context) @@ -395,10 +392,7 @@ def post(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if ( - sac.submission_status - != SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION - ): + if sac.submission_status != STATUS.READY_FOR_CERTIFICATION: return redirect(f"/audit/submission-progress/{sac.report_id}") if form.is_valid(): @@ -445,10 +439,7 @@ def get(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if ( - sac.submission_status - != SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION - ): + if sac.submission_status != STATUS.READY_FOR_CERTIFICATION: return redirect(f"/audit/submission-progress/{sac.report_id}") return render(request, "audit/auditor-certification-step-2.html", context) @@ -474,10 +465,7 @@ def post(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. 
- if ( - sac.submission_status - != SingleAuditChecklist.STATUS.READY_FOR_CERTIFICATION - ): + if sac.submission_status != STATUS.READY_FOR_CERTIFICATION: return redirect(f"/audit/submission-progress/{sac.report_id}") if form2.is_valid(): @@ -496,12 +484,8 @@ def post(self, request, *args, **kwargs): auditor_certification.update(form_cleaned) validated = validate_auditor_certification_json(auditor_certification) sac.auditor_certification = validated - sac.transition_to_auditor_certified() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.AUDITOR_CERTIFICATION_COMPLETED, - ) - logger.info("Auditor certification saved.", auditor_certification) + if sac_transition(request, sac, transition_to=STATUS.AUDITOR_CERTIFIED): + logger.info("Auditor certification saved.", auditor_certification) return redirect(reverse("audit:SubmissionProgress", args=[report_id])) context["form"] = form2 @@ -532,7 +516,7 @@ def get(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if sac.submission_status != SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED: + if sac.submission_status != STATUS.AUDITOR_CERTIFIED: return redirect(f"/audit/submission-progress/{sac.report_id}") return render(request, "audit/auditee-certification-step-1.html", context) @@ -559,7 +543,7 @@ def post(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if sac.submission_status != SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED: + if sac.submission_status != STATUS.AUDITOR_CERTIFIED: return redirect(f"/audit/submission-progress/{sac.report_id}") if form.is_valid(): @@ -606,7 +590,7 @@ def get(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if sac.submission_status != SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED: + if sac.submission_status != STATUS.AUDITOR_CERTIFIED: return redirect(f"/audit/submission-progress/{sac.report_id}") return render(request, "audit/auditee-certification-step-2.html", context) @@ -631,7 +615,7 @@ def post(self, request, *args, **kwargs): } # Return to checklist, the Audit is not in the correct state. - if sac.submission_status != SingleAuditChecklist.STATUS.AUDITOR_CERTIFIED: + if sac.submission_status != STATUS.AUDITOR_CERTIFIED: return redirect(f"/audit/submission-progress/{sac.report_id}") if form2.is_valid(): @@ -650,12 +634,8 @@ def post(self, request, *args, **kwargs): auditee_certification.update(form_cleaned) validated = validate_auditee_certification_json(auditee_certification) sac.auditee_certification = validated - sac.transition_to_auditee_certified() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.AUDITEE_CERTIFICATION_COMPLETED, - ) - logger.info("Auditee certification saved.", auditee_certification) + if sac_transition(request, sac, transition_to=STATUS.AUDITEE_CERTIFIED): + logger.info("Auditee certification saved.", auditee_certification) return redirect(reverse("audit:SubmissionProgress", args=[report_id])) context["form"] = form2 @@ -727,24 +707,23 @@ def post(self, request, *args, **kwargs): context, ) - sac.transition_to_submitted() - sac.save( - event_user=request.user, event_type=SubmissionEvent.EventType.SUBMITTED - ) + sac_transition(request, sac, transition_to=STATUS.SUBMITTED) with transaction.atomic(): disseminated = sac.disseminate() + print("Disseminated result =-=-=-=-=-=-=") + print(disseminated) + print("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=") + # disseminated is None if there were no errors. 
if disseminated is None: - sac.transition_to_disseminated() - sac.save( - event_user=request.user, - event_type=SubmissionEvent.EventType.DISSEMINATED, - ) + print("DONE! This is now disseminated.") + sac_transition(request, sac, transition_to=STATUS.DISSEMINATED) # Remove workbook artifacts after the report has been disseminated. remove_workbook_artifacts(sac) else: + print("Dissemination... has failed.") pass # FIXME: We should now provide a reasonable error to the user. diff --git a/backend/census_historical_migration/end_to_end_core.py b/backend/census_historical_migration/end_to_end_core.py index 800b9e72c..a83527c94 100644 --- a/backend/census_historical_migration/end_to_end_core.py +++ b/backend/census_historical_migration/end_to_end_core.py @@ -17,7 +17,8 @@ MigrationErrorDetail, ) from audit.intake_to_dissemination import IntakeToDissemination -from audit.models import SingleAuditChecklist +from audit.models.models import STATUS +from audit.models.viewflow import SingleAuditChecklistFlow from dissemination.models import ( AdditionalEin, AdditionalUei, @@ -54,15 +55,16 @@ def step_through_certifications(sac, audit_header): - sac.transition_to_ready_for_certification() - sac.transition_to_auditor_certified() - sac.transition_to_auditee_certified() + flow = SingleAuditChecklistFlow(sac) + flow.transition_to_ready_for_certification() + flow.transition_to_auditor_certified() + flow.transition_to_auditee_certified() - sac.transition_name.append(SingleAuditChecklist.STATUS.CERTIFIED) + sac.transition_name.append(STATUS.CERTIFIED) sac.transition_date.append(datetime.now(timezone.utc)) - sac.transition_to_submitted() - sac.transition_to_disseminated() + flow.transition_to_submitted() + flow.transition_to_disseminated() # Patch for transition date diff --git a/backend/dissemination/management/commands/delete_and_regenerate_dissemination_from_intake.py b/backend/dissemination/management/commands/delete_and_regenerate_dissemination_from_intake.py index 70e290316..210fb532b 100644 --- a/backend/dissemination/management/commands/delete_and_regenerate_dissemination_from_intake.py +++ b/backend/dissemination/management/commands/delete_and_regenerate_dissemination_from_intake.py @@ -13,6 +13,7 @@ SecondaryAuditor, ) from audit.models import SingleAuditChecklist +from audit.models.models import STATUS logger = logging.getLogger(__name__) @@ -56,8 +57,8 @@ def handle(self, *args, **_kwargs): # Now, re-run dissemination for everything # in the intake tables. regen_statuses = ( - SingleAuditChecklist.STATUS.DISSEMINATED, - SingleAuditChecklist.STATUS.SUBMITTED, + STATUS.DISSEMINATED, + STATUS.SUBMITTED, ) for sac in SingleAuditChecklist.objects.all(): diff --git a/backend/dissemination/remove_workbook_artifacts.py b/backend/dissemination/remove_workbook_artifacts.py index 008649c8e..31f7095cc 100644 --- a/backend/dissemination/remove_workbook_artifacts.py +++ b/backend/dissemination/remove_workbook_artifacts.py @@ -1,216 +1,214 @@ -import logging -import math - -from django.conf import settings -from audit.models.models import ExcelFile, SingleAuditChecklist -from boto3 import client as boto3_client -from botocore.client import ClientError, Config -from django.core.paginator import Paginator -from django.core.paginator import PageNotAnInteger, EmptyPage - - -logger = logging.getLogger(__name__) - - -def remove_workbook_artifacts(sac): - """ - Remove all workbook artifacts associated with the given sac. 
- """ - try: - excel_files = ExcelFile.objects.filter(sac=sac) - files = [f"excel/{excel_file.filename}" for excel_file in excel_files] - - if files: - # Delete the files from S3 in bulk - delete_files_in_bulk(files, sac) - - except ExcelFile.DoesNotExist: - logger.info(f"No files found to delete for report: {sac.report_id}") - except Exception as e: - logger.error( - f"Failed to delete files from S3 for report: {sac.report_id}. Error: {e}" - ) - - -def delete_files_in_bulk(filenames, sac): - """Delete files from S3 in bulk.""" - # This client uses the internal endpoint URL because we're making a request to S3 from within the app - s3_client = boto3_client( - service_name="s3", - region_name=settings.AWS_S3_PRIVATE_REGION_NAME, - aws_access_key_id=settings.AWS_PRIVATE_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_PRIVATE_SECRET_ACCESS_KEY, - endpoint_url=settings.AWS_S3_PRIVATE_INTERNAL_ENDPOINT, - config=Config(signature_version="s3v4"), - ) - - try: - delete_objects = [{"Key": filename} for filename in filenames] - - response = s3_client.delete_objects( - Bucket=settings.AWS_PRIVATE_STORAGE_BUCKET_NAME, - Delete={"Objects": delete_objects}, - ) - - deleted_files = response.get("Deleted", []) - for deleted in deleted_files: - logger.info( - f"Successfully deleted {deleted['Key']} from S3 for report: {sac.report_id}" - ) - - errors = response.get("Errors", []) - if errors: - for error in errors: - logger.error( - f"Failed to delete {error['Key']} from S3 for report: {sac.report_id}. Error: {error['Message']}" # nosec B608 - ) - - except ClientError as e: - logger.error( - f"Failed to delete files from S3 for report: {sac.report_id}. Error: {e}" - ) - - -def clean_artifacts(sac_list): - """ - Perform necessary cleanup associated with the given list of sac values. - """ - try: - excel_files = ExcelFile.objects.filter(sac__in=sac_list) - files = [f"excel/{excel_file.filename}" for excel_file in excel_files] - - if files: - logger.info( - f"Found {len(files)} ExcelFile records for reports: {[sac.report_id for sac in sac_list]}" - ) - - # Track results but do not delete the ExcelFile records from the database - successful_deletes, failed_deletes = batch_removal( - files, - sac_list, - { - f"excel/{excel_file.filename}": excel_file.sac.report_id - for excel_file in excel_files - }, - ) - - if failed_deletes: - logger.error( - f"Failed to delete the following files from S3: {failed_deletes}" - ) - if successful_deletes: - logger.info( - f"Successfully deleted the following files from S3: {successful_deletes}" - ) - - except Exception as e: - logger.error(f"Failed to process files for the provided sac values. 
Error: {e}") - - -def batch_removal(filenames, sac_list, sac_to_report_id_map): - """Delete files from S3 in bulk and return the results.""" - s3_client = boto3_client( - service_name="s3", - region_name=settings.AWS_S3_PRIVATE_REGION_NAME, - aws_access_key_id=settings.AWS_PRIVATE_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_PRIVATE_SECRET_ACCESS_KEY, - endpoint_url=settings.AWS_S3_PRIVATE_INTERNAL_ENDPOINT, - config=Config(signature_version="s3v4"), - ) - - try: - delete_objects = [{"Key": filename} for filename in filenames] - response = s3_client.delete_objects( - Bucket=settings.AWS_PRIVATE_STORAGE_BUCKET_NAME, - Delete={"Objects": delete_objects}, - ) - - successful_deletes = [] - failed_deletes = [] - deleted_files = response.get("Deleted", []) - for deleted in deleted_files: - filename = deleted["Key"] - successful_deletes.append( - { - "filename": filename, - "sac_report_id": sac_to_report_id_map[filename], - } - ) - - errors = response.get("Errors", []) - if errors: - for error in errors: - filename = error["Key"] - failed_deletes.append( - { - "filename": filename, - "sac_report_id": sac_to_report_id_map[filename], - "error_message": error["Message"], - } - ) - - return successful_deletes, failed_deletes - - except ClientError as e: - logger.error( - f"Failed to delete files from S3 for sac values: {[sac.report_id for sac in sac_list]}. Error: {e}" - ) - return [], [{"error_message": str(e)}] - except Exception as e: - logger.error(f"Failed to delete files from S3. Error: {e}") - return [], [{"error_message": str(e)}] - - -def delete_workbooks(partition_number, total_partitions, page_size=10, pages=None): - """Iterates over disseminated reports for the specified partition.""" - - if partition_number < 1 or partition_number > total_partitions: - raise ValueError( - "Invalid partition number. It must be between 1 and the total number of partitions." - ) - - all_ids = ( - SingleAuditChecklist.objects.filter( - submission_status=SingleAuditChecklist.STATUS.DISSEMINATED - ) - .values_list("id", flat=True) - .order_by("id") - ) - - total_ids = len(all_ids) - ids_per_partition = math.ceil(total_ids / total_partitions) - - start_index = (partition_number - 1) * ids_per_partition - end_index = min(partition_number * ids_per_partition, total_ids) - - ids_to_process = all_ids[start_index:end_index] - - sacs = SingleAuditChecklist.objects.filter(id__in=ids_to_process).order_by("id") - - paginator = Paginator(sacs, page_size) - total_pages = ( - paginator.num_pages if pages is None else min(pages, paginator.num_pages) - ) - - logger.info( - f"Retrieving {sacs.count()} reports for partition {partition_number} of {total_partitions}" - ) - - for page_number in range(1, total_pages + 1): - try: - page = paginator.page(page_number) - logger.info( - f"Processing page {page_number} with {page.object_list.count()} reports." 
- ) - - # Extract sac values from the current page - sac_list = list(page.object_list) - clean_artifacts(sac_list) - - except PageNotAnInteger: - logger.error(f"Page number {page_number} is not an integer.") - except EmptyPage: - logger.info(f"No more pages to process after page {page_number}.") - break - except Exception as e: - logger.error(f"An error occurred while processing page {page_number}: {e}") +import logging +import math + +from django.conf import settings +from audit.models.models import ExcelFile, SingleAuditChecklist, STATUS +from boto3 import client as boto3_client +from botocore.client import ClientError, Config +from django.core.paginator import Paginator +from django.core.paginator import PageNotAnInteger, EmptyPage + + +logger = logging.getLogger(__name__) + + +def remove_workbook_artifacts(sac): + """ + Remove all workbook artifacts associated with the given sac. + """ + try: + excel_files = ExcelFile.objects.filter(sac=sac) + files = [f"excel/{excel_file.filename}" for excel_file in excel_files] + + if files: + # Delete the files from S3 in bulk + delete_files_in_bulk(files, sac) + + except ExcelFile.DoesNotExist: + logger.info(f"No files found to delete for report: {sac.report_id}") + except Exception as e: + logger.error( + f"Failed to delete files from S3 for report: {sac.report_id}. Error: {e}" + ) + + +def delete_files_in_bulk(filenames, sac): + """Delete files from S3 in bulk.""" + # This client uses the internal endpoint URL because we're making a request to S3 from within the app + s3_client = boto3_client( + service_name="s3", + region_name=settings.AWS_S3_PRIVATE_REGION_NAME, + aws_access_key_id=settings.AWS_PRIVATE_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_PRIVATE_SECRET_ACCESS_KEY, + endpoint_url=settings.AWS_S3_PRIVATE_INTERNAL_ENDPOINT, + config=Config(signature_version="s3v4"), + ) + + try: + delete_objects = [{"Key": filename} for filename in filenames] + + response = s3_client.delete_objects( + Bucket=settings.AWS_PRIVATE_STORAGE_BUCKET_NAME, + Delete={"Objects": delete_objects}, + ) + + deleted_files = response.get("Deleted", []) + for deleted in deleted_files: + logger.info( + f"Successfully deleted {deleted['Key']} from S3 for report: {sac.report_id}" + ) + + errors = response.get("Errors", []) + if errors: + for error in errors: + logger.error( + f"Failed to delete {error['Key']} from S3 for report: {sac.report_id}. Error: {error['Message']}" # nosec B608 + ) + + except ClientError as e: + logger.error( + f"Failed to delete files from S3 for report: {sac.report_id}. Error: {e}" + ) + + +def clean_artifacts(sac_list): + """ + Perform necessary cleanup associated with the given list of sac values. + """ + try: + excel_files = ExcelFile.objects.filter(sac__in=sac_list) + files = [f"excel/{excel_file.filename}" for excel_file in excel_files] + + if files: + logger.info( + f"Found {len(files)} ExcelFile records for reports: {[sac.report_id for sac in sac_list]}" + ) + + # Track results but do not delete the ExcelFile records from the database + successful_deletes, failed_deletes = batch_removal( + files, + sac_list, + { + f"excel/{excel_file.filename}": excel_file.sac.report_id + for excel_file in excel_files + }, + ) + + if failed_deletes: + logger.error( + f"Failed to delete the following files from S3: {failed_deletes}" + ) + if successful_deletes: + logger.info( + f"Successfully deleted the following files from S3: {successful_deletes}" + ) + + except Exception as e: + logger.error(f"Failed to process files for the provided sac values. 
Error: {e}") + + +def batch_removal(filenames, sac_list, sac_to_report_id_map): + """Delete files from S3 in bulk and return the results.""" + s3_client = boto3_client( + service_name="s3", + region_name=settings.AWS_S3_PRIVATE_REGION_NAME, + aws_access_key_id=settings.AWS_PRIVATE_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_PRIVATE_SECRET_ACCESS_KEY, + endpoint_url=settings.AWS_S3_PRIVATE_INTERNAL_ENDPOINT, + config=Config(signature_version="s3v4"), + ) + + try: + delete_objects = [{"Key": filename} for filename in filenames] + response = s3_client.delete_objects( + Bucket=settings.AWS_PRIVATE_STORAGE_BUCKET_NAME, + Delete={"Objects": delete_objects}, + ) + + successful_deletes = [] + failed_deletes = [] + deleted_files = response.get("Deleted", []) + for deleted in deleted_files: + filename = deleted["Key"] + successful_deletes.append( + { + "filename": filename, + "sac_report_id": sac_to_report_id_map[filename], + } + ) + + errors = response.get("Errors", []) + if errors: + for error in errors: + filename = error["Key"] + failed_deletes.append( + { + "filename": filename, + "sac_report_id": sac_to_report_id_map[filename], + "error_message": error["Message"], + } + ) + + return successful_deletes, failed_deletes + + except ClientError as e: + logger.error( + f"Failed to delete files from S3 for sac values: {[sac.report_id for sac in sac_list]}. Error: {e}" + ) + return [], [{"error_message": str(e)}] + except Exception as e: + logger.error(f"Failed to delete files from S3. Error: {e}") + return [], [{"error_message": str(e)}] + + +def delete_workbooks(partition_number, total_partitions, page_size=10, pages=None): + """Iterates over disseminated reports for the specified partition.""" + + if partition_number < 1 or partition_number > total_partitions: + raise ValueError( + "Invalid partition number. It must be between 1 and the total number of partitions." + ) + + all_ids = ( + SingleAuditChecklist.objects.filter(submission_status=STATUS.DISSEMINATED) + .values_list("id", flat=True) + .order_by("id") + ) + + total_ids = len(all_ids) + ids_per_partition = math.ceil(total_ids / total_partitions) + + start_index = (partition_number - 1) * ids_per_partition + end_index = min(partition_number * ids_per_partition, total_ids) + + ids_to_process = all_ids[start_index:end_index] + + sacs = SingleAuditChecklist.objects.filter(id__in=ids_to_process).order_by("id") + + paginator = Paginator(sacs, page_size) + total_pages = ( + paginator.num_pages if pages is None else min(pages, paginator.num_pages) + ) + + logger.info( + f"Retrieving {sacs.count()} reports for partition {partition_number} of {total_partitions}" + ) + + for page_number in range(1, total_pages + 1): + try: + page = paginator.page(page_number) + logger.info( + f"Processing page {page_number} with {page.object_list.count()} reports." 
+ ) + + # Extract sac values from the current page + sac_list = list(page.object_list) + clean_artifacts(sac_list) + + except PageNotAnInteger: + logger.error(f"Page number {page_number} is not an integer.") + except EmptyPage: + logger.info(f"No more pages to process after page {page_number}.") + break + except Exception as e: + logger.error(f"An error occurred while processing page {page_number}: {e}") diff --git a/backend/dissemination/test_remove_workbook_artifacts.py b/backend/dissemination/test_remove_workbook_artifacts.py index 461b7e7ce..2311f6b0e 100644 --- a/backend/dissemination/test_remove_workbook_artifacts.py +++ b/backend/dissemination/test_remove_workbook_artifacts.py @@ -1,155 +1,155 @@ -from django.test import TestCase -from unittest.mock import patch -from audit.models.models import ExcelFile, SingleAuditChecklist -from model_bakery import baker - -from dissemination.remove_workbook_artifacts import ( - clean_artifacts, - delete_workbooks, - remove_workbook_artifacts, -) - - -class RemovedWorkbookArtifactsTestCase(TestCase): - - @patch("dissemination.remove_workbook_artifacts.delete_files_in_bulk") - def test_removed_workbook_artifacts_success(self, mock_delete_files_in_bulk): - sac = baker.make( - SingleAuditChecklist, - submission_status=SingleAuditChecklist.STATUS.IN_PROGRESS, - report_id="test_report_id", - ) - - # Create ExcelFile instances - excel_file_1 = baker.make(ExcelFile, sac=sac, form_section="fake_section") - excel_file_2 = baker.make( - ExcelFile, sac=sac, form_section="another_fake_section" - ) - - remove_workbook_artifacts(sac) - - # Assert that the ExcelFile instances are not deleted - self.assertTrue(ExcelFile.objects.filter(sac=sac).exists()) - - # Assert S3 bulk delete was called with the correct filenames - mock_delete_files_in_bulk.assert_called_once_with( - [ - f"excel/{sac.report_id}--{excel_file_1.form_section}.xlsx", - f"excel/{sac.report_id}--{excel_file_2.form_section}.xlsx", - ], - sac, - ) - - @patch("dissemination.remove_workbook_artifacts.delete_files_in_bulk") - def test_removed_workbook_artifacts_no_files(self, mock_delete_files_in_bulk): - sac = baker.make( - SingleAuditChecklist, - submission_status=SingleAuditChecklist.STATUS.IN_PROGRESS, - report_id="test_report_id", - ) - - # Ensure no ExcelFile instances exist for this SAC - ExcelFile.objects.filter(sac=sac).delete() - - remove_workbook_artifacts(sac) - - # Assert that no ExcelFile instances exist - self.assertFalse(ExcelFile.objects.filter(sac=sac).exists()) - - # Assert S3 bulk delete was not called - mock_delete_files_in_bulk.assert_not_called() - - -class CleanArtifactsTestCase(TestCase): - - @patch("dissemination.remove_workbook_artifacts.batch_removal") - def test_clean_artifacts_success(self, mock_batch_removal): - # Create SAC instances - sac_1 = baker.make(SingleAuditChecklist, report_id="report_id_1") - sac_2 = baker.make(SingleAuditChecklist, report_id="report_id_2") - - # Create ExcelFile instances - excel_file_1 = baker.make(ExcelFile, sac=sac_1, form_section="section_1") - excel_file_2 = baker.make(ExcelFile, sac=sac_2, form_section="section_2") - - sac_list = [sac_1, sac_2] - clean_artifacts(sac_list) - - # Assert that the ExcelFile instances still exist (no deletion) - self.assertTrue(ExcelFile.objects.filter(sac__in=sac_list).exists()) - - # Assert S3 bulk delete was called with the correct filenames - mock_batch_removal.assert_called_once_with( - [ - f"excel/{sac_1.report_id}--{excel_file_1.form_section}.xlsx", - 
f"excel/{sac_2.report_id}--{excel_file_2.form_section}.xlsx", - ], - sac_list, - { - f"excel/{sac_1.report_id}--{excel_file_1.form_section}.xlsx": sac_1.report_id, - f"excel/{sac_2.report_id}--{excel_file_2.form_section}.xlsx": sac_2.report_id, - }, - ) - - @patch("dissemination.remove_workbook_artifacts.batch_removal") - def test_clean_artifacts_no_files(self, mock_batch_removal): - sac = baker.make(SingleAuditChecklist, report_id="test_report_id") - - # Ensure no ExcelFile instances exist for this SAC - ExcelFile.objects.filter(sac=sac).delete() - - clean_artifacts([sac]) - - # Assert that no ExcelFile instances exist - self.assertFalse(ExcelFile.objects.filter(sac=sac).exists()) - - # Assert S3 bulk delete was not called - mock_batch_removal.assert_not_called() - - -class DeleteWorkbooksTestCase(TestCase): - - def setUp(self): - # Common setup for SAC instances - self.sac_1 = baker.make(SingleAuditChecklist, id=1, report_id="report_1") - self.sac_2 = baker.make(SingleAuditChecklist, id=2, report_id="report_2") - # Create associated ExcelFile instances - self.excel_file_1 = baker.make( - ExcelFile, sac=self.sac_1, form_section="section_1" - ) - self.excel_file_2 = baker.make( - ExcelFile, sac=self.sac_2, form_section="section_2" - ) - # Update submission status to DISSEMINATED - self.sac_1.submission_status = SingleAuditChecklist.STATUS.DISSEMINATED - self.sac_2.submission_status = SingleAuditChecklist.STATUS.DISSEMINATED - self.sac_1.save() - self.sac_2.save() - - @patch("dissemination.remove_workbook_artifacts.clean_artifacts") - def test_delete_workbooks_single_page(self, mock_clean_artifacts): - """Test delete_workbooks with a single page of workbooks""" - delete_workbooks(partition_number=1, total_partitions=1, page_size=10, pages=1) - - mock_clean_artifacts.assert_called_once_with([self.sac_1, self.sac_2]) - - @patch("dissemination.remove_workbook_artifacts.clean_artifacts") - def test_delete_workbooks_multiple_pages(self, mock_clean_artifacts): - """Test delete_workbooks with multiple pages of workbooks""" - delete_workbooks(partition_number=1, total_partitions=1, page_size=1, pages=2) - - self.assertEqual(mock_clean_artifacts.call_count, 2) - - mock_clean_artifacts.assert_any_call([self.sac_1]) - mock_clean_artifacts.assert_any_call([self.sac_2]) - - @patch("dissemination.remove_workbook_artifacts.clean_artifacts") - def test_delete_workbooks_all_pages(self, mock_clean_artifacts): - """Test delete_workbooks with all pages of workbooks""" - - delete_workbooks(partition_number=1, total_partitions=1, page_size=1) - - self.assertEqual(mock_clean_artifacts.call_count, 2) - - mock_clean_artifacts.assert_any_call([self.sac_1]) - mock_clean_artifacts.assert_any_call([self.sac_2]) +from django.test import TestCase +from unittest.mock import patch +from audit.models.models import ExcelFile, SingleAuditChecklist, STATUS +from model_bakery import baker + +from dissemination.remove_workbook_artifacts import ( + clean_artifacts, + delete_workbooks, + remove_workbook_artifacts, +) + + +class RemovedWorkbookArtifactsTestCase(TestCase): + + @patch("dissemination.remove_workbook_artifacts.delete_files_in_bulk") + def test_removed_workbook_artifacts_success(self, mock_delete_files_in_bulk): + sac = baker.make( + SingleAuditChecklist, + submission_status=STATUS.IN_PROGRESS, + report_id="test_report_id", + ) + + # Create ExcelFile instances + excel_file_1 = baker.make(ExcelFile, sac=sac, form_section="fake_section") + excel_file_2 = baker.make( + ExcelFile, sac=sac, 
form_section="another_fake_section" + ) + + remove_workbook_artifacts(sac) + + # Assert that the ExcelFile instances are not deleted + self.assertTrue(ExcelFile.objects.filter(sac=sac).exists()) + + # Assert S3 bulk delete was called with the correct filenames + mock_delete_files_in_bulk.assert_called_once_with( + [ + f"excel/{sac.report_id}--{excel_file_1.form_section}.xlsx", + f"excel/{sac.report_id}--{excel_file_2.form_section}.xlsx", + ], + sac, + ) + + @patch("dissemination.remove_workbook_artifacts.delete_files_in_bulk") + def test_removed_workbook_artifacts_no_files(self, mock_delete_files_in_bulk): + sac = baker.make( + SingleAuditChecklist, + submission_status=STATUS.IN_PROGRESS, + report_id="test_report_id", + ) + + # Ensure no ExcelFile instances exist for this SAC + ExcelFile.objects.filter(sac=sac).delete() + + remove_workbook_artifacts(sac) + + # Assert that no ExcelFile instances exist + self.assertFalse(ExcelFile.objects.filter(sac=sac).exists()) + + # Assert S3 bulk delete was not called + mock_delete_files_in_bulk.assert_not_called() + + +class CleanArtifactsTestCase(TestCase): + + @patch("dissemination.remove_workbook_artifacts.batch_removal") + def test_clean_artifacts_success(self, mock_batch_removal): + # Create SAC instances + sac_1 = baker.make(SingleAuditChecklist, report_id="report_id_1") + sac_2 = baker.make(SingleAuditChecklist, report_id="report_id_2") + + # Create ExcelFile instances + excel_file_1 = baker.make(ExcelFile, sac=sac_1, form_section="section_1") + excel_file_2 = baker.make(ExcelFile, sac=sac_2, form_section="section_2") + + sac_list = [sac_1, sac_2] + clean_artifacts(sac_list) + + # Assert that the ExcelFile instances still exist (no deletion) + self.assertTrue(ExcelFile.objects.filter(sac__in=sac_list).exists()) + + # Assert S3 bulk delete was called with the correct filenames + mock_batch_removal.assert_called_once_with( + [ + f"excel/{sac_1.report_id}--{excel_file_1.form_section}.xlsx", + f"excel/{sac_2.report_id}--{excel_file_2.form_section}.xlsx", + ], + sac_list, + { + f"excel/{sac_1.report_id}--{excel_file_1.form_section}.xlsx": sac_1.report_id, + f"excel/{sac_2.report_id}--{excel_file_2.form_section}.xlsx": sac_2.report_id, + }, + ) + + @patch("dissemination.remove_workbook_artifacts.batch_removal") + def test_clean_artifacts_no_files(self, mock_batch_removal): + sac = baker.make(SingleAuditChecklist, report_id="test_report_id") + + # Ensure no ExcelFile instances exist for this SAC + ExcelFile.objects.filter(sac=sac).delete() + + clean_artifacts([sac]) + + # Assert that no ExcelFile instances exist + self.assertFalse(ExcelFile.objects.filter(sac=sac).exists()) + + # Assert S3 bulk delete was not called + mock_batch_removal.assert_not_called() + + +class DeleteWorkbooksTestCase(TestCase): + + def setUp(self): + # Common setup for SAC instances + self.sac_1 = baker.make(SingleAuditChecklist, id=1, report_id="report_1") + self.sac_2 = baker.make(SingleAuditChecklist, id=2, report_id="report_2") + # Create associated ExcelFile instances + self.excel_file_1 = baker.make( + ExcelFile, sac=self.sac_1, form_section="section_1" + ) + self.excel_file_2 = baker.make( + ExcelFile, sac=self.sac_2, form_section="section_2" + ) + # Update submission status to DISSEMINATED + self.sac_1.submission_status = STATUS.DISSEMINATED + self.sac_2.submission_status = STATUS.DISSEMINATED + self.sac_1.save() + self.sac_2.save() + + @patch("dissemination.remove_workbook_artifacts.clean_artifacts") + def test_delete_workbooks_single_page(self, mock_clean_artifacts): + 
"""Test delete_workbooks with a single page of workbooks""" + delete_workbooks(partition_number=1, total_partitions=1, page_size=10, pages=1) + + mock_clean_artifacts.assert_called_once_with([self.sac_1, self.sac_2]) + + @patch("dissemination.remove_workbook_artifacts.clean_artifacts") + def test_delete_workbooks_multiple_pages(self, mock_clean_artifacts): + """Test delete_workbooks with multiple pages of workbooks""" + delete_workbooks(partition_number=1, total_partitions=1, page_size=1, pages=2) + + self.assertEqual(mock_clean_artifacts.call_count, 2) + + mock_clean_artifacts.assert_any_call([self.sac_1]) + mock_clean_artifacts.assert_any_call([self.sac_2]) + + @patch("dissemination.remove_workbook_artifacts.clean_artifacts") + def test_delete_workbooks_all_pages(self, mock_clean_artifacts): + """Test delete_workbooks with all pages of workbooks""" + + delete_workbooks(partition_number=1, total_partitions=1, page_size=1) + + self.assertEqual(mock_clean_artifacts.call_count, 2) + + mock_clean_artifacts.assert_any_call([self.sac_1]) + mock_clean_artifacts.assert_any_call([self.sac_2]) diff --git a/backend/requirements.txt b/backend/requirements.txt index 7e82c0675..96988f9a9 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -247,7 +247,9 @@ django==5.1 \ # django-cors-headers # django-csp # django-dbbackup + # django-filter # django-storages + # django-viewflow # djangorestframework # djangorestframework-simplejwt django-cache-url==3.4.5 \ @@ -266,6 +268,12 @@ django-dbbackup==4.1.0 \ --hash=sha256:c411d38d0f8e60ab3254017278c14ebd75d4001b5634fc73be7fbe8a5260583b \ --hash=sha256:c539b5246b429a22a8efadbab3719ee6b8eda45c66c4ff6592056c590d51c782 # via -r ./requirements/requirements.in +django-filter==24.3 \ + --hash=sha256:c4852822928ce17fb699bcfccd644b3574f1a2d80aeb2b4ff4f16b02dd49dc64 \ + --hash=sha256:d8ccaf6732afd21ca0542f6733b11591030fa98669f8d15599b358e24a2cd9c3 + # via + # -r ./requirements/requirements.in + # django-viewflow django-fsm==3.0.0 \ --hash=sha256:0112bcac573ad14051cf8ebe73bf296b6d5409f093e5f1677eb16e2196e263b3 \ --hash=sha256:fa28f84f47eae7ce9247585ac6c1895e4ada08efff93fb243a59e9ff77b2d4ec @@ -274,6 +282,10 @@ django-storages[boto3]==1.14.2 \ --hash=sha256:1db759346b52ada6c2efd9f23d8241ecf518813eb31db9e2589207174f58f6ad \ --hash=sha256:51b36af28cc5813b98d5f3dfe7459af638d84428c8df4a03990c7d74d1bea4e5 # via -r ./requirements/requirements.in +django-viewflow==2.2.7 \ + --hash=sha256:38c8493dc25efc49df2003777b951980b773b8bb8e31926dd591e9fe0e8acb91 \ + --hash=sha256:c81a91d55e235c9bd75dc26bbc26dcfdda5f21eb97a381537040d9b7b07221cc + # via -r ./requirements/requirements.in djangorestframework==3.15.2 \ --hash=sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20 \ --hash=sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad diff --git a/backend/requirements/requirements.in b/backend/requirements/requirements.in index e14e3b9c0..74bcccf16 100644 --- a/backend/requirements/requirements.in +++ b/backend/requirements/requirements.in @@ -4,11 +4,13 @@ cryptography>=42.0.5 django-cors-headers django-csp django-dbbackup +django-filter +django-fsm django-storages[boto3] Django>=5.0.8 djangorestframework>=3.15.2 djangorestframework-simplejwt -django-fsm +django-viewflow environs[django] faker fs diff --git a/backend/support/management/commands/check_cog_over_for_year.py b/backend/support/management/commands/check_cog_over_for_year.py index 1e3e88e46..4b75e2621 100644 --- 
a/backend/support/management/commands/check_cog_over_for_year.py +++ b/backend/support/management/commands/check_cog_over_for_year.py @@ -5,6 +5,7 @@ from dissemination.models import General, FederalAward from audit.models import SingleAuditChecklist, User +from audit.models.models import STATUS from support.models import CognizantAssignment from config.settings import ENVIRONMENT @@ -44,7 +45,7 @@ def handle(self, *args, **options): for gen in gens: sac = self.make_sac(gen) - sac.submission_status = sac.STATUS.SUBMITTED + sac.submission_status = STATUS.SUBMITTED sac.save() if not sac.cognizant_agency and not sac.oversight_agency: sac.assign_cog_over()
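
A note on the bulk S3 removal in remove_workbook_artifacts.py above: batch_removal hands delete_objects a list of {"Key": ...} dicts and then splits the response into successful and failed deletes. Below is a minimal standalone sketch of that call shape; the bucket name and keys are placeholders, whereas the real code pulls them from Django settings and from the ExcelFile records, and also tracks the owning SAC report_id per filename.

    import boto3
    from botocore.exceptions import ClientError

    def bulk_delete(bucket, keys):
        # One request deletes many keys; the response separates "Deleted"
        # entries from "Errors", which is what batch_removal partitions into
        # its successful_deletes / failed_deletes lists.
        s3 = boto3.client("s3")
        try:
            response = s3.delete_objects(
                Bucket=bucket,
                Delete={"Objects": [{"Key": key} for key in keys]},
            )
        except ClientError as err:
            return [], [{"error_message": str(err)}]
        deleted = [d["Key"] for d in response.get("Deleted", [])]
        failed = [
            {"filename": e["Key"], "error_message": e["Message"]}
            for e in response.get("Errors", [])
        ]
        return deleted, failed

    # Example call with a made-up bucket and key:
    # bulk_delete("example-private-bucket", ["excel/example_report--section_1.xlsx"])

Worth keeping in mind: a single delete_objects request accepts at most 1,000 keys, so a batch larger than that would need to be chunked before being handed to batch_removal.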
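
The partitioning in delete_workbooks slices the ordered id list into total_partitions near-equal chunks with math.ceil and then pages through the chosen slice with Django's Paginator. Here is a sketch of just that arithmetic over a plain list of ids; the sample numbers are invented for illustration and stand in for the DISSEMINATED SingleAuditChecklist ids used in the real code.

    import math
    from django.core.paginator import Paginator

    def ids_for_partition(all_ids, partition_number, total_partitions):
        # Same guard and arithmetic as delete_workbooks: each partition owns a
        # contiguous slice of ceil(total / total_partitions) ids.
        if partition_number < 1 or partition_number > total_partitions:
            raise ValueError("partition_number must be between 1 and total_partitions")
        per_partition = math.ceil(len(all_ids) / total_partitions)
        start = (partition_number - 1) * per_partition
        end = min(partition_number * per_partition, len(all_ids))
        return all_ids[start:end]

    all_ids = list(range(1, 26))  # pretend there are 25 disseminated reports
    ids = ids_for_partition(all_ids, partition_number=2, total_partitions=3)
    # ceil(25 / 3) == 9, so partition 2 covers indexes 9..17, i.e. ids 10..18.

    paginator = Paginator(ids, per_page=4)
    for page_number in range(1, paginator.num_pages + 1):
        page = paginator.page(page_number)
        print(f"page {page_number}: {list(page.object_list)}")

Bounding end at len(all_ids) means the last partition simply comes up short rather than overrunning the list, and passing pages=None to delete_workbooks processes every page in the slice.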
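
Finally, the import pattern repeated across these hunks: with STATUS promoted to module level in audit.models.models, call sites reference the enumeration directly instead of reaching through SingleAuditChecklist.STATUS, for example:

    from audit.models.models import STATUS
    from audit.models import SingleAuditChecklist

    # formerly SingleAuditChecklist.STATUS.DISSEMINATED
    disseminated = SingleAuditChecklist.objects.filter(
        submission_status=STATUS.DISSEMINATED
    )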