Merge pull request #1388 from GSA/main
11/04/2024 Production Deploy
ccostino authored Nov 4, 2024
2 parents 69c8cba + 165042b commit ea5bf72
Showing 57 changed files with 4,363 additions and 2,431 deletions.
10 changes: 5 additions & 5 deletions .ds.baseline
@@ -209,7 +209,7 @@
"filename": "tests/app/aws/test_s3.py",
"hashed_secret": "67a74306b06d0c01624fe0d0249a570f4d093747",
"is_verified": false,
"line_number": 29,
"line_number": 40,
"is_secret": false
}
],
@@ -239,7 +239,7 @@
"filename": "tests/app/dao/test_services_dao.py",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 265,
"line_number": 289,
"is_secret": false
}
],
@@ -249,15 +249,15 @@
"filename": "tests/app/dao/test_users_dao.py",
"hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
"is_verified": false,
"line_number": 52,
"line_number": 69,
"is_secret": false
},
{
"type": "Secret Keyword",
"filename": "tests/app/dao/test_users_dao.py",
"hashed_secret": "f2c57870308dc87f432e5912d4de6f8e322721ba",
"is_verified": false,
"line_number": 176,
"line_number": 199,
"is_secret": false
}
],
@@ -384,5 +384,5 @@
       }
     ]
   },
-  "generated_at": "2024-09-27T16:42:53Z"
+  "generated_at": "2024-10-28T20:26:27Z"
 }
2 changes: 1 addition & 1 deletion .github/workflows/checks.yml
@@ -63,7 +63,7 @@ jobs:
           NOTIFY_E2E_TEST_PASSWORD: ${{ secrets.NOTIFY_E2E_TEST_PASSWORD }}
       - name: Check coverage threshold
         # TODO get this back up to 95
-        run: poetry run coverage report -m --fail-under=91
+        run: poetry run coverage report -m --fail-under=94

   validate-new-relic-config:
     runs-on: ubuntu-latest
2 changes: 1 addition & 1 deletion Makefile
@@ -84,7 +84,7 @@ test: ## Run tests and create coverage report
 	poetry run coverage run --omit=*/migrations/*,*/tests/* -m pytest --maxfail=10

 	## TODO set this back to 95 asap
-	poetry run coverage report -m --fail-under=91
+	poetry run coverage report -m --fail-under=94
 	poetry run coverage html -d .coverage_cache

 .PHONY: py-lock
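Both the CI check and this Makefile target raise the coverage gate from 91 to 94 (the TODO aims for 95). For reference, a rough Python equivalent of that gate using coverage.py's public API, offered as a sketch only: the project actually invokes the CLI through poetry rather than running anything like this.

    import sys

    import coverage

    cov = coverage.Coverage()
    cov.load()  # read the .coverage data file written by `coverage run`
    percent = cov.report(show_missing=True)  # same output as `coverage report -m`
    if percent < 94:
        sys.exit(2)  # mirrors --fail-under=94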
24 changes: 6 additions & 18 deletions app/aws/s3.py
@@ -70,9 +70,13 @@ def get_s3_resource():
     return s3_resource


+def _get_bucket_name():
+    return current_app.config["CSV_UPLOAD_BUCKET"]["bucket"]
+
+
 def list_s3_objects():

-    bucket_name = current_app.config["CSV_UPLOAD_BUCKET"]["bucket"]
+    bucket_name = _get_bucket_name()
     s3_client = get_s3_client()
     # Our reports only support 7 days, but pull 8 days to avoid
     # any edge cases
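The comment above explains the window choice: reports only cover 7 days, so pulling 8 days leaves a day of slack at the boundary. A minimal sketch of how such a cutoff can be applied when listing objects; the helper name and filtering logic here are illustrative, not the actual body of list_s3_objects().

    from datetime import datetime, timedelta, timezone

    def _recent_object_keys(s3_client, bucket_name):
        # 7 days of report coverage plus one day of slack
        cutoff = datetime.now(timezone.utc) - timedelta(days=8)
        paginator = s3_client.get_paginator("list_objects_v2")
        for page in paginator.paginate(Bucket=bucket_name):
            for obj in page.get("Contents", []):
                # LastModified is a timezone-aware datetime in boto3 responses
                if obj["LastModified"] >= cutoff:
                    yield obj["Key"]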
@@ -476,23 +480,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):

     set_job_cache(job_cache, f"{job_id}_personalisation", extract_personalisation(job))

-    # If we can find the quick dictionary, use it
-    if job_cache.get(f"{job_id}_personalisation") is not None:
-        personalisation_to_return = job_cache.get(f"{job_id}_personalisation")[0].get(
-            job_row_number
-        )
-        if personalisation_to_return:
-            return personalisation_to_return
-        else:
-            current_app.logger.warning(
-                f"Was unable to retrieve personalisation from lookup dictionary for job {job_id}"
-            )
-            return {}
-    else:
-        current_app.logger.error(
-            f"Was unable to construct lookup dictionary for job {job_id}"
-        )
-        return {}
+    return job_cache.get(f"{job_id}_personalisation")[0].get(job_row_number)


 def get_job_metadata_from_s3(service_id, job_id):
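Note the behavioral change in get_personalisation_from_s3(): the warning and error logging paths are gone, and the function no longer falls back to {}. Because set_job_cache() runs immediately beforehand, the cache entry is assumed to exist. An illustrative caller-side guard, not code from this commit:

    # A missing row number now yields None rather than a logged warning and {},
    # so a caller wanting the old default could write:
    personalisation = get_personalisation_from_s3(service_id, job_id, job_row_number) or {}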
4 changes: 3 additions & 1 deletion app/clients/__init__.py
@@ -16,7 +16,9 @@
     # This is the default but just for doc sake
     # there may come a time when increasing this helps
     # with job cache management.
-    max_pool_connections=10,
+    # max_pool_connections=10,
+    # Reducing to 4 connections due to BrokenPipeErrors
+    max_pool_connections=4,
 )


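The change above tunes botocore's per-client HTTP connection pool. A minimal sketch of the construct being edited, assuming the standard botocore Config API (the variable name is illustrative):

    from botocore.config import Config

    # Pool size reduced from the default of 10 to 4 to mitigate the
    # BrokenPipeErrors mentioned in the comment above.
    AWS_CLIENT_CONFIG = Config(
        max_pool_connections=4,
    )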
79 changes: 2 additions & 77 deletions app/commands.py
@@ -24,12 +24,6 @@
     dao_create_or_update_annual_billing_for_year,
     set_default_free_allowance_for_service,
 )
-from app.dao.fact_billing_dao import (
-    delete_billing_data_for_service_for_day,
-    fetch_billing_data_for_day,
-    get_service_ids_that_need_billing_populated,
-    update_fact_billing,
-)
 from app.dao.jobs_dao import dao_get_job_by_id
 from app.dao.organization_dao import (
     dao_add_service_to_organization,
@@ -63,7 +57,7 @@
     TemplateHistory,
     User,
 )
-from app.utils import get_midnight_in_utc, utc_now
+from app.utils import utc_now
 from notifications_utils.recipients import RecipientCSV
 from notifications_utils.template import SMSMessageTemplate
 from tests.app.db import (
@@ -167,6 +161,7 @@ def purge_functional_test_data(user_email_prefix):
         delete_model_user(usr)


+# TODO maintainability what is the purpose of this command? Who would use it and why?
 @notify_command(name="insert-inbound-numbers")
 @click.option(
     "-f",
@@ -175,7 +170,6 @@ def purge_functional_test_data(user_email_prefix):
help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
)
def insert_inbound_numbers_from_file(file_name):
# TODO maintainability what is the purpose of this command? Who would use it and why?

current_app.logger.info(f"Inserting inbound numbers from {file_name}")
with open(file_name) as file:
@@ -195,50 +189,6 @@ def setup_commands(application):
     application.cli.add_command(command_group)


-@notify_command(name="rebuild-ft-billing-for-day")
-@click.option("-s", "--service_id", required=False, type=click.UUID)
-@click.option(
-    "-d",
-    "--day",
-    help="The date to recalculate, as YYYY-MM-DD",
-    required=True,
-    type=click_dt(format="%Y-%m-%d"),
-)
-def rebuild_ft_billing_for_day(service_id, day):
-    # TODO maintainability what is the purpose of this command? Who would use it and why?
-
-    """
-    Rebuild the data in ft_billing for the given service_id and date
-    """
-
-    def rebuild_ft_data(process_day, service):
-        deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
-        current_app.logger.info(
-            f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
-        )
-        transit_data = fetch_billing_data_for_day(
-            process_day=process_day, service_id=service
-        )
-        # transit_data = every row that should exist
-        for data in transit_data:
-            # upsert existing rows
-            update_fact_billing(data, process_day)
-        current_app.logger.info(
-            f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
-        )
-
-    if service_id:
-        # confirm the service exists
-        dao_fetch_service_by_id(service_id)
-        rebuild_ft_data(day, service_id)
-    else:
-        services = get_service_ids_that_need_billing_populated(
-            get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
-        )
-        for row in services:
-            rebuild_ft_data(day, row.service_id)
-
-
 @notify_command(name="bulk-invite-user-to-service")
 @click.option(
     "-f",
@@ -472,31 +422,6 @@ def associate_services_to_organizations():
current_app.logger.info("finished associating services to organizations")


@notify_command(name="populate-service-volume-intentions")
@click.option(
"-f",
"--file_name",
required=True,
help="Pipe delimited file containing service_id, SMS, email",
)
def populate_service_volume_intentions(file_name):
# [0] service_id
# [1] SMS:: volume intentions for service
# [2] Email:: volume intentions for service

# TODO maintainability what is the purpose of this command? Who would use it and why?

with open(file_name, "r") as f:
for line in itertools.islice(f, 1, None):
columns = line.split(",")
current_app.logger.info(columns)
service = dao_fetch_service_by_id(columns[0])
service.volume_sms = columns[1]
service.volume_email = columns[2]
dao_update_service(service)
current_app.logger.info("populate-service-volume-intentions complete")


@notify_command(name="populate-go-live")
@click.option(
"-f", "--file_name", required=True, help="CSV file containing live service data"
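For orientation, this is the command-definition pattern used throughout app/commands.py, sketched with a hypothetical command: notify_command wraps click and registers the function on the command group that setup_commands() attaches to the application CLI.

    import click
    from flask import current_app

    @notify_command(name="example-command")
    @click.option("-f", "--file_name", required=True, help="Path of the input file")
    def example_command(file_name):
        # Hypothetical body: read the file and log each line
        with open(file_name) as f:
            for line in f:
                current_app.logger.info(line.strip())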
34 changes: 22 additions & 12 deletions app/dao/annual_billing_dao.py
@@ -1,4 +1,5 @@
 from flask import current_app
+from sqlalchemy import select, update

 from app import db
 from app.dao.dao_utils import autocommit
@@ -26,42 +27,51 @@ def dao_create_or_update_annual_billing_for_year(


 def dao_get_annual_billing(service_id):
-    return (
-        AnnualBilling.query.filter_by(
+    stmt = (
+        select(AnnualBilling)
+        .filter_by(
             service_id=service_id,
         )
         .order_by(AnnualBilling.financial_year_start)
-        .all()
     )
+    return db.session.execute(stmt).scalars().all()


 @autocommit
 def dao_update_annual_billing_for_future_years(
     service_id, free_sms_fragment_limit, financial_year_start
 ):
-    AnnualBilling.query.filter(
-        AnnualBilling.service_id == service_id,
-        AnnualBilling.financial_year_start > financial_year_start,
-    ).update({"free_sms_fragment_limit": free_sms_fragment_limit})
+    stmt = (
+        update(AnnualBilling)
+        .filter(
+            AnnualBilling.service_id == service_id,
+            AnnualBilling.financial_year_start > financial_year_start,
+        )
+        .values({"free_sms_fragment_limit": free_sms_fragment_limit})
+    )
+    db.session.execute(stmt)
+    db.session.commit()


 def dao_get_free_sms_fragment_limit_for_year(service_id, financial_year_start=None):
     if not financial_year_start:
         financial_year_start = get_current_calendar_year_start_year()

-    return AnnualBilling.query.filter_by(
+    stmt = select(AnnualBilling).filter_by(
         service_id=service_id, financial_year_start=financial_year_start
-    ).first()
+    )
+    return db.session.execute(stmt).scalars().first()


 def dao_get_all_free_sms_fragment_limit(service_id):
-    return (
-        AnnualBilling.query.filter_by(
+    stmt = (
+        select(AnnualBilling)
+        .filter_by(
             service_id=service_id,
         )
         .order_by(AnnualBilling.financial_year_start)
-        .all()
     )
+    return db.session.execute(stmt).scalars().all()


 def set_default_free_allowance_for_service(service, year_start=None):
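Every edit above follows the same migration: legacy Model.query calls are rewritten as SQLAlchemy 2.0-style select()/update() statements executed through the session. A condensed sketch of both halves of the pattern; the function and import paths are assumptions based on the surrounding diff.

    from sqlalchemy import select, update

    from app import db
    from app.models import AnnualBilling

    def example_read_then_update(service_id, new_limit):
        # 2.0-style read: build a Select, execute it, unwrap ORM objects
        rows = db.session.execute(
            select(AnnualBilling).filter_by(service_id=service_id)
        ).scalars().all()

        # 2.0-style bulk update: build an Update, execute, commit explicitly
        db.session.execute(
            update(AnnualBilling)
            .where(AnnualBilling.service_id == service_id)
            .values(free_sms_fragment_limit=new_limit)
        )
        db.session.commit()
        return rows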
32 changes: 23 additions & 9 deletions app/dao/complaint_dao.py
@@ -1,10 +1,11 @@
 from datetime import timedelta

 from flask import current_app
-from sqlalchemy import desc
+from sqlalchemy import desc, func, select

 from app import db
 from app.dao.dao_utils import autocommit
+from app.dao.inbound_sms_dao import Pagination
 from app.models import Complaint
 from app.utils import get_midnight_in_utc

@@ -15,23 +16,36 @@ def save_complaint(complaint):


 def fetch_paginated_complaints(page=1):
-    return Complaint.query.order_by(desc(Complaint.created_at)).paginate(
-        page=page, per_page=current_app.config["PAGE_SIZE"]
+    page_size = current_app.config["PAGE_SIZE"]
+    total_count = db.session.scalar(select(func.count()).select_from(Complaint))
+    offset = (page - 1) * page_size
+    stmt = (
+        select(Complaint)
+        .order_by(desc(Complaint.created_at))
+        .offset(offset)
+        .limit(page_size)
     )
+    result = db.session.execute(stmt).scalars().all()
+    pagination = Pagination(result, page=page, per_page=page_size, total=total_count)
+    return pagination


 def fetch_complaints_by_service(service_id):
-    return (
-        Complaint.query.filter_by(service_id=service_id)
+    stmt = (
+        select(Complaint)
+        .filter_by(service_id=service_id)
         .order_by(desc(Complaint.created_at))
-        .all()
     )
+    return db.session.execute(stmt).scalars().all()


 def fetch_count_of_complaints(start_date, end_date):
     start_date = get_midnight_in_utc(start_date)
     end_date = get_midnight_in_utc(end_date + timedelta(days=1))

-    return Complaint.query.filter(
-        Complaint.created_at >= start_date, Complaint.created_at < end_date
-    ).count()
+    stmt = (
+        select(func.count())
+        .select_from(Complaint)
+        .filter(Complaint.created_at >= start_date, Complaint.created_at < end_date)
+    )
+    return db.session.execute(stmt).scalar() or 0
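fetch_paginated_complaints() now paginates by hand: one count query for the total, then offset/limit for the requested page, wrapped in the Pagination helper imported from app.dao.inbound_sms_dao. The same pattern generalized, as a sketch (the function name and default page size are illustrative):

    from sqlalchemy import desc, func, select

    from app import db
    from app.dao.inbound_sms_dao import Pagination
    from app.models import Complaint

    def paginate_complaints(page=1, per_page=50):
        # The total row count drives the pagination metadata
        total = db.session.scalar(select(func.count()).select_from(Complaint))
        # Fetch only the requested page, newest first
        items = db.session.execute(
            select(Complaint)
            .order_by(desc(Complaint.created_at))
            .offset((page - 1) * per_page)
            .limit(per_page)
        ).scalars().all()
        return Pagination(items, page=page, per_page=per_page, total=total)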