merge from main
Kenneth Kehl committed Oct 29, 2024
2 parents f8f4e46 + f782f1e · commit e5117bb
Showing 31 changed files with 1,839 additions and 987 deletions.
.ds.baseline (8 changes: 4 additions & 4 deletions)
@@ -239,7 +239,7 @@
         "filename": "tests/app/dao/test_services_dao.py",
         "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
         "is_verified": false,
-        "line_number": 265,
+        "line_number": 289,
         "is_secret": false
       }
     ],
@@ -249,15 +249,15 @@
         "filename": "tests/app/dao/test_users_dao.py",
         "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
         "is_verified": false,
-        "line_number": 52,
+        "line_number": 69,
         "is_secret": false
       },
       {
         "type": "Secret Keyword",
         "filename": "tests/app/dao/test_users_dao.py",
         "hashed_secret": "f2c57870308dc87f432e5912d4de6f8e322721ba",
         "is_verified": false,
-        "line_number": 176,
+        "line_number": 199,
         "is_secret": false
       }
     ],
@@ -384,5 +384,5 @@
       }
     ]
   },
-  "generated_at": "2024-09-27T16:42:53Z"
+  "generated_at": "2024-10-28T20:26:27Z"
 }
.github/workflows/checks.yml (2 changes: 1 addition & 1 deletion)
@@ -63,7 +63,7 @@ jobs:
           NOTIFY_E2E_TEST_PASSWORD: ${{ secrets.NOTIFY_E2E_TEST_PASSWORD }}
       - name: Check coverage threshold
         # TODO get this back up to 95
-        run: poetry run coverage report -m --fail-under=91
+        run: poetry run coverage report -m --fail-under=93

   validate-new-relic-config:
     runs-on: ubuntu-latest
Makefile (2 changes: 1 addition & 1 deletion)
@@ -84,7 +84,7 @@ test: ## Run tests and create coverage report
 	poetry run coverage run --omit=*/migrations/*,*/tests/* -m pytest --maxfail=10

 	## TODO set this back to 95 asap
-	poetry run coverage report -m --fail-under=91
+	poetry run coverage report -m --fail-under=93
 	poetry run coverage html -d .coverage_cache

 .PHONY: py-lock
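The same 93% gate now appears in both .github/workflows/checks.yml and the Makefile, so the two numbers have to be bumped together (the TODO in both files still aims for 95). As a rough illustration of what `coverage report --fail-under=93` enforces, here is a minimal sketch using coverage.py's Python API; the script and its THRESHOLD constant are hypothetical, not part of this repository.

```python
# Hypothetical illustration of the --fail-under gate; not part of this repo.
# Assumes a .coverage data file already produced by `coverage run ... -m pytest`.
import sys

import coverage

THRESHOLD = 93  # keep in sync with Makefile and .github/workflows/checks.yml

cov = coverage.Coverage()
cov.load()  # read the .coverage data file
total = cov.report(show_missing=True)  # prints the report and returns total % covered
if total < THRESHOLD:
    print(f"Coverage {total:.2f}% is below the required {THRESHOLD}%")
    sys.exit(2)  # a non-zero exit is what fails the `make test` target and the CI job
```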
app/aws/s3.py (18 changes: 1 addition & 17 deletions)
@@ -476,23 +476,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):

     set_job_cache(job_cache, f"{job_id}_personalisation", extract_personalisation(job))

-    # If we can find the quick dictionary, use it
-    if job_cache.get(f"{job_id}_personalisation") is not None:
-        personalisation_to_return = job_cache.get(f"{job_id}_personalisation")[0].get(
-            job_row_number
-        )
-        if personalisation_to_return:
-            return personalisation_to_return
-        else:
-            current_app.logger.warning(
-                f"Was unable to retrieve personalisation from lookup dictionary for job {job_id}"
-            )
-            return {}
-    else:
-        current_app.logger.error(
-            f"Was unable to construct lookup dictionary for job {job_id}"
-        )
-        return {}
+    return job_cache.get(f"{job_id}_personalisation")[0].get(job_row_number)


 def get_job_metadata_from_s3(service_id, job_id):
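The simplification above removes the defensive branches: the new one-liner assumes the cache entry written by set_job_cache is always present and that its first element is the row-number-to-personalisation mapping. A minimal, self-contained sketch of that lookup pattern (hypothetical cache shape and values; the repo's real helpers may differ) illustrates the behavioural change: a missing cache entry now raises instead of logging and returning an empty dict.

```python
# Hypothetical sketch of the lookup pattern left after this commit; the real
# set_job_cache/extract_personalisation helpers live in app/aws/s3.py.
job_cache = {}


def set_job_cache(cache, key, value):
    # Assumption: cache entries keep the data at index 0 (the real cache may
    # also store extra bookkeeping), which is why the lookup indexes [0].
    cache[key] = (value,)


set_job_cache(job_cache, "job-123_personalisation", {5: {"name": "Ada"}})

# Behaviour after this commit: no None check and no warning/error logging.
personalisation = job_cache.get("job-123_personalisation")[0].get(5)
print(personalisation)  # {'name': 'Ada'}

# For a job that never made it into the cache, job_cache.get(...) returns None,
# so indexing [0] raises TypeError; the removed code logged and returned {} instead.
```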
app/commands.py (79 changes: 2 additions & 77 deletions)
@@ -24,12 +24,6 @@
     dao_create_or_update_annual_billing_for_year,
     set_default_free_allowance_for_service,
 )
-from app.dao.fact_billing_dao import (
-    delete_billing_data_for_service_for_day,
-    fetch_billing_data_for_day,
-    get_service_ids_that_need_billing_populated,
-    update_fact_billing,
-)
 from app.dao.jobs_dao import dao_get_job_by_id
 from app.dao.organization_dao import (
     dao_add_service_to_organization,
@@ -63,7 +57,7 @@
     TemplateHistory,
     User,
 )
-from app.utils import get_midnight_in_utc, utc_now
+from app.utils import utc_now
 from notifications_utils.recipients import RecipientCSV
 from notifications_utils.template import SMSMessageTemplate
 from tests.app.db import (
@@ -167,6 +161,7 @@ def purge_functional_test_data(user_email_prefix):
         delete_model_user(usr)


+# TODO maintainability what is the purpose of this command? Who would use it and why?
 @notify_command(name="insert-inbound-numbers")
 @click.option(
     "-f",
@@ -175,7 +170,6 @@ def purge_functional_test_data(user_email_prefix):
     help="""Full path of the file to upload, file is a contains inbound numbers, one number per line.""",
 )
 def insert_inbound_numbers_from_file(file_name):
-    # TODO maintainability what is the purpose of this command? Who would use it and why?

     current_app.logger.info(f"Inserting inbound numbers from {file_name}")
     with open(file_name) as file:
@@ -195,50 +189,6 @@ def setup_commands(application):
     application.cli.add_command(command_group)


-@notify_command(name="rebuild-ft-billing-for-day")
-@click.option("-s", "--service_id", required=False, type=click.UUID)
-@click.option(
-    "-d",
-    "--day",
-    help="The date to recalculate, as YYYY-MM-DD",
-    required=True,
-    type=click_dt(format="%Y-%m-%d"),
-)
-def rebuild_ft_billing_for_day(service_id, day):
-    # TODO maintainability what is the purpose of this command? Who would use it and why?
-
-    """
-    Rebuild the data in ft_billing for the given service_id and date
-    """
-
-    def rebuild_ft_data(process_day, service):
-        deleted_rows = delete_billing_data_for_service_for_day(process_day, service)
-        current_app.logger.info(
-            f"deleted {deleted_rows} existing billing rows for {service} on {process_day}"
-        )
-        transit_data = fetch_billing_data_for_day(
-            process_day=process_day, service_id=service
-        )
-        # transit_data = every row that should exist
-        for data in transit_data:
-            # upsert existing rows
-            update_fact_billing(data, process_day)
-        current_app.logger.info(
-            f"added/updated {len(transit_data)} billing rows for {service} on {process_day}"
-        )
-
-    if service_id:
-        # confirm the service exists
-        dao_fetch_service_by_id(service_id)
-        rebuild_ft_data(day, service_id)
-    else:
-        services = get_service_ids_that_need_billing_populated(
-            get_midnight_in_utc(day), get_midnight_in_utc(day + timedelta(days=1))
-        )
-        for row in services:
-            rebuild_ft_data(day, row.service_id)
-
-
 @notify_command(name="bulk-invite-user-to-service")
 @click.option(
     "-f",
@@ -472,31 +422,6 @@ def associate_services_to_organizations():
     current_app.logger.info("finished associating services to organizations")


-@notify_command(name="populate-service-volume-intentions")
-@click.option(
-    "-f",
-    "--file_name",
-    required=True,
-    help="Pipe delimited file containing service_id, SMS, email",
-)
-def populate_service_volume_intentions(file_name):
-    # [0] service_id
-    # [1] SMS:: volume intentions for service
-    # [2] Email:: volume intentions for service
-
-    # TODO maintainability what is the purpose of this command? Who would use it and why?
-
-    with open(file_name, "r") as f:
-        for line in itertools.islice(f, 1, None):
-            columns = line.split(",")
-            current_app.logger.info(columns)
-            service = dao_fetch_service_by_id(columns[0])
-            service.volume_sms = columns[1]
-            service.volume_email = columns[2]
-            dao_update_service(service)
-    current_app.logger.info("populate-service-volume-intentions complete")
-
-
 @notify_command(name="populate-go-live")
 @click.option(
     "-f", "--file_name", required=True, help="CSV file containing live service data"
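For context on the two deleted commands: everything in app/commands.py follows the same pattern, a function decorated with the repo's notify_command helper plus click.option declarations, collected into a group that setup_commands registers on the Flask CLI (the application.cli.add_command(command_group) context line visible above). A simplified sketch of that wiring with plain click and Flask, where the notify_command body is an assumption rather than the repo's actual implementation:

```python
# Simplified sketch of the command-registration pattern shown in this diff;
# the real notify_command decorator in app/commands.py may differ.
import click
from flask import Flask
from flask.cli import AppGroup

command_group = AppGroup("command")  # commands run as `flask command <name> ...`


def notify_command(name=None):
    # Assumption: notify_command roughly wraps command_group.command.
    def decorator(func):
        return command_group.command(name=name)(func)

    return decorator


@notify_command(name="insert-inbound-numbers")
@click.option("-f", "--file_name", required=True, help="Full path of the file to upload")
def insert_inbound_numbers_from_file(file_name):
    click.echo(f"Inserting inbound numbers from {file_name}")


def setup_commands(application: Flask):
    # Mirrors the `application.cli.add_command(command_group)` context line above.
    application.cli.add_command(command_group)
```

With this wiring, removing a command is just a matter of deleting its decorated function, which is all the two deletions above do; no separate registration table has to be updated.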
(Diffs for the remaining changed files are not shown.)
