From b2ed52ffd4fda8004ec0f18d6c3a48e9accebe9a Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 07:41:26 -0700 Subject: [PATCH 01/45] upgrade test queries to sqlalchemy 2.0 --- tests/app/celery/test_tasks.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 593926c18..c2dff1eb3 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -8,9 +8,10 @@ from celery.exceptions import Retry from freezegun import freeze_time from requests import RequestException +from sqlalchemy import func, select from sqlalchemy.exc import SQLAlchemyError -from app import encryption +from app import db, encryption from app.celery import provider_tasks, tasks from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, @@ -1166,7 +1167,13 @@ def test_process_incomplete_job_sms(mocker, sample_template): create_notification(sample_template, job, 0) create_notification(sample_template, job, 1) - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job.id) + ) + result = db.session.execute(stmt) + assert result.rowcount == 2 process_incomplete_job(str(job.id)) From 431d51dd631437d0a90c6e8f484ade4667080367 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 08:16:17 -0700 Subject: [PATCH 02/45] remove query.filters from test_tasks --- tests/app/celery/test_tasks.py | 56 +++++++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 15 deletions(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index c2dff1eb3..67dd5d4e7 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -1172,12 +1172,13 @@ def test_process_incomplete_job_sms(mocker, sample_template): .select_from(Notification) 
.where(Notification.job_id == job.id) ) - result = db.session.execute(stmt) - assert result.rowcount == 2 + count = db.session.execute(stmt).scalar() + assert count == 2 process_incomplete_job(str(job.id)) - completed_job = Job.query.filter(Job.id == job.id).one() + stmt = select(Job).where(Job.id == job.id) + completed_job = db.session.execute(stmt).scalars().one() assert completed_job.job_status == JobStatus.FINISHED @@ -1213,11 +1214,17 @@ def test_process_incomplete_job_with_notifications_all_sent(mocker, sample_templ create_notification(sample_template, job, 8) create_notification(sample_template, job, 9) - assert Notification.query.filter(Notification.job_id == job.id).count() == 10 + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job.id) + ) + assert db.session.execute(stmt).scalar() == 10 process_incomplete_job(str(job.id)) - completed_job = Job.query.filter(Job.id == job.id).one() + stmt = select(Job).where(Job.id == job.id) + completed_job = db.session.execute(stmt).scalars().one() assert completed_job.job_status == JobStatus.FINISHED @@ -1245,7 +1252,12 @@ def test_process_incomplete_jobs_sms(mocker, sample_template): create_notification(sample_template, job, 1) create_notification(sample_template, job, 2) - assert Notification.query.filter(Notification.job_id == job.id).count() == 3 + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job.id) + ) + assert db.session.execute(stmt).scalar() == 3 job2 = create_job( template=sample_template, @@ -1262,13 +1274,21 @@ def test_process_incomplete_jobs_sms(mocker, sample_template): create_notification(sample_template, job2, 3) create_notification(sample_template, job2, 4) - assert Notification.query.filter(Notification.job_id == job2.id).count() == 5 + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job2.id) + ) + + assert db.session.execute(stmt).scalar() == 5 jobs = [job.id, 
job2.id] process_incomplete_jobs(jobs) - completed_job = Job.query.filter(Job.id == job.id).one() - completed_job2 = Job.query.filter(Job.id == job2.id).one() + stmt = select(Job).where(Job.id == job.id) + completed_job = db.session.execute(stmt).scalars().one() + stmt = select(Job).where(Job.id == job2.id) + completed_job2 = db.session.execute(stmt).scalars().one() assert completed_job.job_status == JobStatus.FINISHED @@ -1294,12 +1314,16 @@ def test_process_incomplete_jobs_no_notifications_added(mocker, sample_template) processing_started=utc_now() - timedelta(minutes=31), job_status=JobStatus.ERROR, ) - - assert Notification.query.filter(Notification.job_id == job.id).count() == 0 + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job.id) + ) + assert db.session.execute(stmt).scalar() == 0 process_incomplete_job(job.id) - - completed_job = Job.query.filter(Job.id == job.id).one() + stmt = select(Job).where(Job.id == job.id) + completed_job = db.session.execute(stmt).scalars().one() assert completed_job.job_status == JobStatus.FINISHED @@ -1355,11 +1379,13 @@ def test_process_incomplete_job_email(mocker, sample_email_template): create_notification(sample_email_template, job, 0) create_notification(sample_email_template, job, 1) - assert Notification.query.filter(Notification.job_id == job.id).count() == 2 + stmt = select(Notification).where(Notification.job_id == job.id) + assert db.session.execute(stmt).scalar() == 2 process_incomplete_job(str(job.id)) - completed_job = Job.query.filter(Job.id == job.id).one() + stmt = select(Job).where(Job.id == job.id) + completed_job = db.session.execute(stmt).scalars().one() assert completed_job.job_status == JobStatus.FINISHED From b92253b782f53143dd7953c3ad1c84eed92a5756 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 08:31:15 -0700 Subject: [PATCH 03/45] remove query.filters from test_tasks --- tests/app/celery/test_tasks.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 67dd5d4e7..84ac83668 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -1379,7 +1379,7 @@ def test_process_incomplete_job_email(mocker, sample_email_template): create_notification(sample_email_template, job, 0) create_notification(sample_email_template, job, 1) - stmt = select(Notification).where(Notification.job_id == job.id) + stmt = select(func.count()).select_from(Notification).where(Notification.job_id == job.id) assert db.session.execute(stmt).scalar() == 2 process_incomplete_job(str(job.id)) From 36251ab95d84ed9190a234652a55fd6af2479286 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 08:54:48 -0700 Subject: [PATCH 04/45] fix more --- tests/app/celery/test_tasks.py | 6 +++++- tests/app/test_commands.py | 31 ++++++++++++++++++++----------- 2 files changed, 25 insertions(+), 12 deletions(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index 84ac83668..a0fd70584 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -1379,7 +1379,11 @@ def test_process_incomplete_job_email(mocker, sample_email_template): create_notification(sample_email_template, job, 0) create_notification(sample_email_template, job, 1) - stmt = select(func.count()).select_from(Notification).where(Notification.job_id == job.id) + stmt = ( + select(func.count()) + .select_from(Notification) + .where(Notification.job_id == job.id) + ) assert db.session.execute(stmt).scalar() == 2 process_incomplete_job(str(job.id)) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 690532da9..a3e09b687 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -3,7 +3,9 @@ from unittest.mock import MagicMock, mock_open import pytest +from sqlalchemy import select +from app import db from app.commands import ( 
_update_template, bulk_invite_user_to_service, @@ -115,7 +117,8 @@ def test_update_jobs_archived_flag(notify_db_session, notify_api): right_now = right_now.strftime("%Y-%m-%d") tomorrow = tomorrow.strftime("%Y-%m-%d") - archived_jobs = Job.query.filter(Job.archived is True).count() + stmt = select(Job).where(Job.archived is True) + archived_jobs = db.session.execute(stmt).scalar() assert archived_jobs == 0 notify_api.test_cli_runner().invoke( @@ -242,7 +245,8 @@ def test_create_test_user_command(notify_db_session, notify_api): assert User.query.count() == user_count + 1 # that user should be the one we added - user = User.query.filter_by(name="Fake Personson").first() + stmt = select(User).where(name="Fake Personson") + user = db.session.execute(stmt).first() assert user.email_address == "somebody@fake.gov" assert user.auth_type == AuthType.SMS assert user.state == "active" @@ -281,10 +285,11 @@ def test_populate_annual_billing_with_defaults( populate_annual_billing_with_defaults, ["-y", 2022] ) - results = AnnualBilling.query.filter( + stmt = select(AnnualBilling).where( AnnualBilling.financial_year_start == 2022, AnnualBilling.service_id == service.id, - ).all() + ) + results = db.session.execute(stmt).scalars().all() assert len(results) == 1 assert results[0].free_sms_fragment_limit == expected_allowance @@ -306,10 +311,11 @@ def test_populate_annual_billing_with_the_previous_years_allowance( populate_annual_billing_with_defaults, ["-y", 2022] ) - results = AnnualBilling.query.filter( + stmt = select(AnnualBilling).where( AnnualBilling.financial_year_start == 2022, AnnualBilling.service_id == service.id, - ).all() + ) + results = db.session.execute(stmt).scalars().all() assert len(results) == 1 assert results[0].free_sms_fragment_limit == expected_allowance @@ -318,10 +324,11 @@ def test_populate_annual_billing_with_the_previous_years_allowance( populate_annual_billing_with_the_previous_years_allowance, ["-y", 2023] ) - results = AnnualBilling.query.filter( + 
stmt = select(AnnualBilling).where( AnnualBilling.financial_year_start == 2023, AnnualBilling.service_id == service.id, - ).all() + ) + results = db.session.execute(stmt).scalars().all() assert len(results) == 1 assert results[0].free_sms_fragment_limit == expected_allowance @@ -355,10 +362,11 @@ def test_populate_annual_billing_with_defaults_sets_free_allowance_to_zero_if_pr populate_annual_billing_with_defaults, ["-y", 2022] ) - results = AnnualBilling.query.filter( + stmt = select(AnnualBilling).where( AnnualBilling.financial_year_start == 2022, AnnualBilling.service_id == service.id, - ).all() + ) + results = db.session.execute(stmt).scalars().all() assert len(results) == 1 assert results[0].free_sms_fragment_limit == 0 @@ -410,7 +418,8 @@ def test_create_service_command(notify_db_session, notify_api): assert Service.query.count() == service_count + 1 # that service should be the one we added - service = Service.query.filter_by(name="Fake Service").first() + stmt = select(Service).where(name="Fake Service") + service = db.session.execute(stmt).first() assert service.email_from == "somebody@fake.gov" assert service.restricted is False assert service.message_limit == 40000 From bcd5e206ea9b1eca0b5c3574cde2204e0357fe5e Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 09:08:58 -0700 Subject: [PATCH 05/45] fix more --- tests/app/test_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index a3e09b687..f497485de 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -418,7 +418,7 @@ def test_create_service_command(notify_db_session, notify_api): assert Service.query.count() == service_count + 1 # that service should be the one we added - stmt = select(Service).where(name="Fake Service") + stmt = select(Service).where(Service.name == "Fake Service") service = db.session.execute(stmt).first() assert service.email_from == 
"somebody@fake.gov" assert service.restricted is False From f002a6c34135283c90b5d93339fe4ef6bbc014fe Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 09:20:26 -0700 Subject: [PATCH 06/45] fix more --- tests/app/test_commands.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index f497485de..7a8bc59f3 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -118,7 +118,7 @@ def test_update_jobs_archived_flag(notify_db_session, notify_api): tomorrow = tomorrow.strftime("%Y-%m-%d") stmt = select(Job).where(Job.archived is True) - archived_jobs = db.session.execute(stmt).scalar() + archived_jobs = db.session.execute(stmt).scalar() or 0 assert archived_jobs == 0 notify_api.test_cli_runner().invoke( @@ -245,7 +245,7 @@ def test_create_test_user_command(notify_db_session, notify_api): assert User.query.count() == user_count + 1 # that user should be the one we added - stmt = select(User).where(name="Fake Personson") + stmt = select(User).where(User.name == "Fake Personson") user = db.session.execute(stmt).first() assert user.email_address == "somebody@fake.gov" assert user.auth_type == AuthType.SMS @@ -419,7 +419,7 @@ def test_create_service_command(notify_db_session, notify_api): # that service should be the one we added stmt = select(Service).where(Service.name == "Fake Service") - service = db.session.execute(stmt).first() + service = db.session.execute(stmt).scalars().first() assert service.email_from == "somebody@fake.gov" assert service.restricted is False assert service.message_limit == 40000 From a2e6e06d3fe5876afa7516a645c35d273b59e42c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 09:28:12 -0700 Subject: [PATCH 07/45] fix more --- tests/app/test_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 
7a8bc59f3..361de9a40 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -246,7 +246,7 @@ def test_create_test_user_command(notify_db_session, notify_api): # that user should be the one we added stmt = select(User).where(User.name == "Fake Personson") - user = db.session.execute(stmt).first() + user = db.session.execute(stmt).scalars().first() assert user.email_address == "somebody@fake.gov" assert user.auth_type == AuthType.SMS assert user.state == "active" From 08c9bf54d1cfe65977ad446369de20bd335ac028 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 09:36:11 -0700 Subject: [PATCH 08/45] fix more --- tests/app/dao/test_provider_details_dao.py | 23 +++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index fd8f4a43d..624ce2bb5 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -2,8 +2,9 @@ import pytest from freezegun import freeze_time +from sqlalchemy import delete, select -from app import notification_provider_clients +from app import db, notification_provider_clients from app.dao.provider_details_dao import ( _get_sms_providers_for_update, dao_get_provider_stats, @@ -65,17 +66,20 @@ def test_can_get_email_providers(notify_db_session): def test_should_not_error_if_any_provider_in_code_not_in_database( restore_provider_details, ): - ProviderDetails.query.filter_by(identifier="sns").delete() + stmt = delete(ProviderDetails).where(ProviderDetails.identifier == "sns") + db.session.execute(stmt) + db.session.commit() + # ProviderDetails.query.filter_by(identifier="sns").delete() assert notification_provider_clients.get_sms_client("sns") @freeze_time("2000-01-01T00:00:00") def test_update_adds_history(restore_provider_details): - ses = ProviderDetails.query.filter(ProviderDetails.identifier == "ses").one() - ses_history = 
ProviderDetailsHistory.query.filter( - ProviderDetailsHistory.id == ses.id - ).one() + stmt = select(ProviderDetails).where(ProviderDetails.identifier == "ses") + ses = db.session.execute(stmt).scalars().one() + stmt = select(ProviderDetailsHistory).where(ProviderDetailsHistory.id == ses.id) + ses_history = db.session.execute(stmt).scalars().one() assert ses.version == 1 assert ses_history.version == 1 @@ -88,11 +92,12 @@ def test_update_adds_history(restore_provider_details): assert not ses.active assert ses.updated_at == datetime(2000, 1, 1, 0, 0, 0) - ses_history = ( - ProviderDetailsHistory.query.filter(ProviderDetailsHistory.id == ses.id) + stmt = ( + select(ProviderDetailsHistory) + .where(ProviderDetailsHistory.id == ses.id) .order_by(ProviderDetailsHistory.version) - .all() ) + ses_history = db.session.execute(stmt).scalars().all() assert ses_history[0].active assert ses_history[0].version == 1 From 79ceddfee40be0f92a4911106911cefd160210b0 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 10:35:49 -0700 Subject: [PATCH 09/45] more fixes --- tests/app/dao/test_invited_user_dao.py | 70 +++++++++------------- tests/app/dao/test_provider_details_dao.py | 19 ++++-- tests/app/test_schemas.py | 13 ++-- 3 files changed, 49 insertions(+), 53 deletions(-) diff --git a/tests/app/dao/test_invited_user_dao.py b/tests/app/dao/test_invited_user_dao.py index da52e52e7..247a3dfda 100644 --- a/tests/app/dao/test_invited_user_dao.py +++ b/tests/app/dao/test_invited_user_dao.py @@ -2,6 +2,7 @@ from datetime import timedelta import pytest +from sqlalchemy import func, select from sqlalchemy.orm.exc import NoResultFound from app import db @@ -123,23 +124,17 @@ def test_should_delete_all_invitations_more_than_one_day_old( ): make_invitation(sample_user, sample_service, age=timedelta(hours=48)) make_invitation(sample_user, sample_service, age=timedelta(hours=48)) - assert ( - len( - InvitedUser.query.filter( - InvitedUser.status != 
InvitedUserStatus.EXPIRED - ).all() - ) - == 2 - ) + stmt = select(InvitedUser).where(InvitedUser.status != InvitedUserStatus.EXPIRED) + result = db.session.execute(stmt).scalars().all() + assert len(result) == 2 expire_invitations_created_more_than_two_days_ago() - assert ( - len( - InvitedUser.query.filter( - InvitedUser.status != InvitedUserStatus.EXPIRED - ).all() - ) - == 0 + stmt = ( + select(func.count()) + .select_from(InvitedUser) + .where(InvitedUser.status != InvitedUserStatus.EXPIRED) ) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 def test_should_not_delete_invitations_less_than_two_days_old( @@ -160,35 +155,28 @@ def test_should_not_delete_invitations_less_than_two_days_old( email_address="expired@1.com", ) - assert ( - len( - InvitedUser.query.filter( - InvitedUser.status != InvitedUserStatus.EXPIRED - ).all() - ) - == 2 + stmt = ( + select(func.count()) + .select_from(InvitedUser) + .where(InvitedUser.status != InvitedUserStatus.EXPIRED) ) + count = db.session.execute(stmt).scalar() or 0 + assert count == 2 expire_invitations_created_more_than_two_days_ago() - assert ( - len( - InvitedUser.query.filter( - InvitedUser.status != InvitedUserStatus.EXPIRED - ).all() - ) - == 1 - ) - assert ( - InvitedUser.query.filter(InvitedUser.status != InvitedUserStatus.EXPIRED) - .first() - .email_address - == "valid@2.com" - ) - assert ( - InvitedUser.query.filter(InvitedUser.status == InvitedUserStatus.EXPIRED) - .first() - .email_address - == "expired@1.com" + stmt = ( + select(func.count()) + .select_from(InvitedUser) + .where(InvitedUser.status != InvitedUserStatus.EXPIRED) ) + count = db.session.execute(stmt).scalar() or 0 + assert count == 1 + stmt = select(InvitedUser).where(InvitedUser.status != InvitedUserStatus.EXPIRED) + invited_user = db.session.execute(stmt).scalars().first() + assert invited_user.email_address == "valid@2.com" + stmt = select(InvitedUser).where(InvitedUser.status == InvitedUserStatus.EXPIRED) + invited_user = 
db.session.execute(stmt).scalars().first() + + assert invited_user.email_address == "expired@1.com" def make_invitation(user, service, age=None, email_address="test@test.com"): diff --git a/tests/app/dao/test_provider_details_dao.py b/tests/app/dao/test_provider_details_dao.py index 624ce2bb5..84c4b2238 100644 --- a/tests/app/dao/test_provider_details_dao.py +++ b/tests/app/dao/test_provider_details_dao.py @@ -2,7 +2,7 @@ import pytest from freezegun import freeze_time -from sqlalchemy import delete, select +from sqlalchemy import delete, select, update from app import db, notification_provider_clients from app.dao.provider_details_dao import ( @@ -69,7 +69,6 @@ def test_should_not_error_if_any_provider_in_code_not_in_database( stmt = delete(ProviderDetails).where(ProviderDetails.identifier == "sns") db.session.execute(stmt) db.session.commit() - # ProviderDetails.query.filter_by(identifier="sns").delete() assert notification_provider_clients.get_sms_client("sns") @@ -135,9 +134,13 @@ def test_get_alternative_sms_provider_fails_if_unrecognised(): @freeze_time("2016-01-01 01:00") def test_get_sms_providers_for_update_returns_providers(restore_provider_details): - ProviderDetails.query.filter(ProviderDetails.identifier == "sns").update( - {"updated_at": None} + stmt = ( + update(ProviderDetails) + .where(ProviderDetails.identifier == "sns") + .values({"updated_at": None}) ) + db.session.execute(stmt) + db.session.commit() resp = _get_sms_providers_for_update(timedelta(hours=1)) @@ -149,9 +152,13 @@ def test_get_sms_providers_for_update_returns_nothing_if_recent_updates( restore_provider_details, ): fifty_nine_minutes_ago = datetime(2016, 1, 1, 0, 1) - ProviderDetails.query.filter(ProviderDetails.identifier == "sns").update( - {"updated_at": fifty_nine_minutes_ago} + stmt = ( + update(ProviderDetails) + .where(ProviderDetails.identifier == "sns") + .values({"updated_at": fifty_nine_minutes_ago}) ) + db.session.execute(stmt) + db.session.commit() resp = 
_get_sms_providers_for_update(timedelta(hours=1)) diff --git a/tests/app/test_schemas.py b/tests/app/test_schemas.py index 270c36a17..b71d2fef8 100644 --- a/tests/app/test_schemas.py +++ b/tests/app/test_schemas.py @@ -2,8 +2,9 @@ import pytest from marshmallow import ValidationError -from sqlalchemy import desc +from sqlalchemy import desc, select +from app import db from app.dao.provider_details_dao import ( dao_update_provider_details, get_provider_details_by_identifier, @@ -145,13 +146,13 @@ def test_provider_details_history_schema_returns_user_details( dao_update_provider_details(current_sms_provider) - current_sms_provider_in_history = ( - ProviderDetailsHistory.query.filter( - ProviderDetailsHistory.id == current_sms_provider.id - ) + stmt = ( + select(ProviderDetailsHistory) + .where(ProviderDetailsHistory.id == current_sms_provider.id) .order_by(desc(ProviderDetailsHistory.version)) - .first() ) + current_sms_provider_in_history = db.session.execute(stmt).scalars().first() + data = provider_details_schema.dump(current_sms_provider_in_history) assert sorted(data["created_by"].keys()) == sorted(["id", "email_address", "name"]) From 7ee741b91c7c189db912cf83d7f9e40197d69d8c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 11:15:08 -0700 Subject: [PATCH 10/45] fix more tests --- .ds.baseline | 6 +-- tests/app/conftest.py | 29 +++++++++----- ...t_notification_dao_delete_notifications.py | 13 +++--- tests/app/db.py | 19 ++++++--- tests/app/service/test_rest.py | 40 +++++++++++++------ tests/app/template/test_rest.py | 5 ++- 6 files changed, 75 insertions(+), 37 deletions(-) diff --git a/.ds.baseline b/.ds.baseline index dd916c550..41a911ddd 100644 --- a/.ds.baseline +++ b/.ds.baseline @@ -267,7 +267,7 @@ "filename": "tests/app/db.py", "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", "is_verified": false, - "line_number": 87, + "line_number": 90, "is_secret": false } ], @@ -305,7 +305,7 @@ "filename": 
"tests/app/service/test_rest.py", "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", "is_verified": false, - "line_number": 1275, + "line_number": 1284, "is_secret": false } ], @@ -384,5 +384,5 @@ } ] }, - "generated_at": "2024-10-28T20:26:27Z" + "generated_at": "2024-10-30T18:15:03Z" } diff --git a/tests/app/conftest.py b/tests/app/conftest.py index 25e9f3f08..38e2e80d2 100644 --- a/tests/app/conftest.py +++ b/tests/app/conftest.py @@ -6,6 +6,7 @@ import pytz import requests_mock from flask import current_app, url_for +from sqlalchemy import select from sqlalchemy.orm.session import make_transient from app import db @@ -100,9 +101,10 @@ def create_sample_notification( if job is None and api_key is None: # we didn't specify in test - lets create it - api_key = ApiKey.query.filter( + stmt = select(ApiKey).where( ApiKey.service == template.service, ApiKey.key_type == key_type - ).first() + ) + api_key = db.session.execute(stmt).scalars().first() if not api_key: api_key = create_api_key(template.service, key_type=key_type) @@ -227,7 +229,8 @@ def sample_service(sample_user): "email_from": email_from, "created_by": sample_user, } - service = Service.query.filter_by(name=service_name).first() + stmt = select(Service).where(Service.name == service_name) + service = db.session.execute(stmt).scalars().first() if not service: service = Service(**data) dao_create_service(service, sample_user, service_permissions=None) @@ -442,9 +445,10 @@ def sample_notification(notify_db_session): service = create_service(check_if_service_exists=True) template = create_template(service=service) - api_key = ApiKey.query.filter( + stmt = select(ApiKey).where( ApiKey.service == template.service, ApiKey.key_type == KeyType.NORMAL - ).first() + ) + api_key = db.session.execute(stmt).scalars().first() if not api_key: api_key = create_api_key(template.service, key_type=KeyType.NORMAL) @@ -595,9 +599,12 @@ def sample_user_service_permission(sample_user): permission = 
PermissionType.MANAGE_SETTINGS data = {"user": sample_user, "service": service, "permission": permission} - p_model = Permission.query.filter_by( - user=sample_user, service=service, permission=permission - ).first() + stmt = select(Permission).where( + Permission.user == sample_user, + Permission.service == service, + Permission.permission == permission, + ) + p_model = db.session.execute(stmt).scalars().first() if not p_model: p_model = Permission(**data) db.session.add(p_model) @@ -612,12 +619,14 @@ def fake_uuid(): @pytest.fixture(scope="function") def ses_provider(): - return ProviderDetails.query.filter_by(identifier="ses").one() + stmt = select(ProviderDetails).where(ProviderDetails.identifier == "ses") + return db.session.execute(stmt).scalars().one() @pytest.fixture(scope="function") def sns_provider(): - return ProviderDetails.query.filter_by(identifier="sns").one() + stmt = select(ProviderDetails).where(ProviderDetails.identifier == "sns") + return db.session.execute(stmt).scalars().one() @pytest.fixture(scope="function") diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index e22721216..45d8958d1 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -2,7 +2,9 @@ from datetime import datetime, timedelta from freezegun import freeze_time +from sqlalchemy import func, select +from app import db from app.dao.notifications_dao import ( insert_notification_history_delete_notifications, move_notifications_to_notification_history, @@ -172,12 +174,13 @@ def test_move_notifications_just_deletes_test_key_notifications(sample_template) assert Notification.query.count() == 0 assert NotificationHistory.query.count() == 2 - assert ( - NotificationHistory.query.filter( - NotificationHistory.key_type == KeyType.TEST - ).count() - == 
0 + stmt = ( + select(func.count()) + .select_from(NotificationHistory) + .where(NotificationHistory.key_type == KeyType.TEST) ) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 @freeze_time("2020-03-20 14:00") diff --git a/tests/app/db.py b/tests/app/db.py index b62f99b4e..07b395295 100644 --- a/tests/app/db.py +++ b/tests/app/db.py @@ -2,6 +2,8 @@ import uuid from datetime import datetime, timedelta +from sqlalchemy import select + from app import db from app.dao import fact_processing_time_dao from app.dao.email_branding_dao import dao_create_email_branding @@ -90,7 +92,8 @@ def create_user( "state": state, "platform_admin": platform_admin, } - user = User.query.filter_by(email_address=email).first() + stmt = select(User).where(User.email_address == email) + user = db.session.execute(stmt).scalars().first() if not user: user = User(**data) save_model_user(user, validated_email_access=True) @@ -130,7 +133,8 @@ def create_service( billing_reference=None, ): if check_if_service_exists: - service = Service.query.filter_by(name=service_name).first() + stmt = select(Service).where(Service.name == service_name) + service = db.session.execute(stmt).scalars().first() if (not check_if_service_exists) or (check_if_service_exists and not service): service = Service( name=service_name, @@ -175,7 +179,8 @@ def create_service( def create_service_with_inbound_number(inbound_number="1234567", *args, **kwargs): service = create_service(*args, **kwargs) - sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + stmt = select(ServiceSmsSender).where(ServiceSmsSender.service_id == service.id) + sms_sender = db.session.execute(stmt).scalars().first() inbound = create_inbound_number(number=inbound_number, service_id=service.id) update_existing_sms_sender_with_inbound_number( service_sms_sender=sms_sender, @@ -189,7 +194,8 @@ def create_service_with_inbound_number(inbound_number="1234567", *args, **kwargs def 
create_service_with_defined_sms_sender(sms_sender_value="1234567", *args, **kwargs): service = create_service(*args, **kwargs) - sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first() + stmt = select(ServiceSmsSender).where(ServiceSmsSender.service_id == service.id) + sms_sender = db.session.execute(stmt).scalars().first() dao_update_service_sms_sender( service_id=service.id, service_sms_sender_id=sms_sender.id, @@ -286,9 +292,10 @@ def create_notification( if not one_off and (job is None and api_key is None): # we did not specify in test - lets create it - api_key = ApiKey.query.filter( + stmt = select(ApiKey).where( ApiKey.service == template.service, ApiKey.key_type == key_type - ).first() + ) + api_key = db.session.execute(stmt).scalars().first() if not api_key: api_key = create_api_key(template.service, key_type=key_type) diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index ecec87ec1..1a48014ec 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ -6,8 +6,10 @@ import pytest from flask import current_app, url_for from freezegun import freeze_time +from sqlalchemy import func, select from sqlalchemy.exc import SQLAlchemyError +from app import db from app.dao.organization_dao import dao_add_service_to_organization from app.dao.service_sms_sender_dao import dao_get_sms_senders_by_service_id from app.dao.service_user_dao import dao_get_service_user @@ -424,9 +426,8 @@ def test_create_service( assert json_resp["data"]["name"] == "created service" - service_sms_senders = ServiceSmsSender.query.filter_by( - service_id=service_db.id - ).all() + stmt = select(ServiceSmsSender).where(ServiceSmsSender.service_id == service_db.id) + service_sms_senders = db.session.execute(stmt).scalars().all() assert len(service_sms_senders) == 1 assert service_sms_senders[0].sms_sender == current_app.config["FROM_NUMBER"] @@ -530,7 +531,13 @@ def 
test_create_service_should_raise_exception_and_not_create_service_if_annual_ annual_billing = AnnualBilling.query.all() assert len(annual_billing) == 0 - assert len(Service.query.filter(Service.name == "created service").all()) == 0 + stmt = ( + select(func.count()) + .select_from(Service) + .where(Service.name == "created service") + ) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 def test_create_service_inherits_branding_from_organization( @@ -933,7 +940,8 @@ def test_update_service_flags_will_remove_service_permissions( assert resp.status_code == 200 assert ServicePermissionType.INTERNATIONAL_SMS not in result["data"]["permissions"] - permissions = ServicePermission.query.filter_by(service_id=service.id).all() + stmt = select(ServicePermission).where(ServicePermission.service_id == service.id) + permissions = db.session.execute(stmt).scalars().all() assert {p.permission for p in permissions} == { ServicePermissionType.SMS, ServicePermissionType.EMAIL, @@ -1004,9 +1012,10 @@ def test_add_service_permission_will_add_permission( headers=[("Content-Type", "application/json"), auth_header], ) - permissions = ServicePermission.query.filter_by( - service_id=service_with_no_permissions.id - ).all() + stmt = select(ServicePermission).where( + ServicePermission.service_id == service_with_no_permissions.id + ) + permissions = db.session.execute(stmt).scalars().all() assert resp.status_code == 200 assert [p.permission for p in permissions] == [permission_to_add] @@ -3318,8 +3327,13 @@ def test_add_service_sms_sender_when_it_is_an_inbound_number_inserts_new_sms_sen assert resp_json["inbound_number_id"] == str(inbound_number.id) assert resp_json["is_default"] - senders = ServiceSmsSender.query.filter_by(service_id=service.id).all() - assert len(senders) == 3 + stmt = ( + select(func.count()) + .select_from(ServiceSmsSender) + .where(ServiceSmsSender.service_id == service.id) + ) + senders = db.session.execute(stmt).scalar() or 0 + assert senders == 3 
def test_add_service_sms_sender_switches_default(client, notify_db_session): @@ -3341,7 +3355,8 @@ def test_add_service_sms_sender_switches_default(client, notify_db_session): assert resp_json["sms_sender"] == "second" assert not resp_json["inbound_number_id"] assert resp_json["is_default"] - sms_senders = ServiceSmsSender.query.filter_by(sms_sender="first").first() + stmt = select(ServiceSmsSender).where(ServiceSmsSender.sms_sender == "first") + sms_senders = db.session.execute(stmt).scalars().first() assert not sms_senders.is_default @@ -3407,7 +3422,8 @@ def test_update_service_sms_sender_switches_default(client, notify_db_session): assert resp_json["sms_sender"] == "second" assert not resp_json["inbound_number_id"] assert resp_json["is_default"] - sms_senders = ServiceSmsSender.query.filter_by(sms_sender="first").first() + stmt = select(ServiceSmsSender).where(ServiceSmsSender.sms_sender == "first") + sms_senders = db.session.execute(stmt).scalars().first() assert not sms_senders.is_default diff --git a/tests/app/template/test_rest.py b/tests/app/template/test_rest.py index 45dfc24f9..d46627343 100644 --- a/tests/app/template/test_rest.py +++ b/tests/app/template/test_rest.py @@ -6,7 +6,9 @@ import pytest from freezegun import freeze_time +from sqlalchemy import select +from app import db from app.dao.templates_dao import dao_get_template_by_id, dao_redact_template from app.enums import ServicePermissionType, TemplateProcessType, TemplateType from app.models import Template, TemplateHistory @@ -86,7 +88,8 @@ def test_create_a_new_template_for_a_service_adds_folder_relationship( data=data, ) assert response.status_code == 201 - template = Template.query.filter(Template.name == "my template").first() + stmt = select(Template).where(Template.name == "my template") + template = db.session.execute(stmt).scalars().first() assert template.folder == parent_folder From 9dfbd991d568405c07eec06d9f9a8b995d9b6a29 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl 
<@kkehl@flexion.us> Date: Wed, 30 Oct 2024 11:44:51 -0700 Subject: [PATCH 11/45] fix more tests --- app/commands.py | 59 ++++++++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 25 deletions(-) diff --git a/app/commands.py b/app/commands.py index 5580e7632..c88e2bcb3 100644 --- a/app/commands.py +++ b/app/commands.py @@ -12,7 +12,7 @@ from faker import Faker from flask import current_app, json from notifications_python_client.authentication import create_jwt_token -from sqlalchemy import and_, text +from sqlalchemy import and_, select, text, update from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.exc import NoResultFound @@ -123,8 +123,8 @@ def purge_functional_test_data(user_email_prefix): if getenv("NOTIFY_ENVIRONMENT", "") not in ["development", "test"]: current_app.logger.error("Can only be run in development") return - - users = User.query.filter(User.email_address.like(f"{user_email_prefix}%")).all() + stmt = select(User).where(User.email_address.like(f"{user_email_prefix}%")) + users = db.session.execute(stmt).scalars().all() for usr in users: # Make sure the full email includes a uuid in it # Just in case someone decides to use a similar email address. 
@@ -338,9 +338,10 @@ def boolean_or_none(field): email_branding = None email_branding_column = columns[5].strip() if len(email_branding_column) > 0: - email_branding = EmailBranding.query.filter( + stmt = select(EmailBranding).where( EmailBranding.name == email_branding_column - ).one() + ) + email_branding = db.session.execute(stmt).scalars().one() data = { "name": columns[0], "active": True, @@ -406,10 +407,14 @@ def populate_organization_agreement_details_from_file(file_name): @notify_command(name="associate-services-to-organizations") def associate_services_to_organizations(): - services = Service.get_history_model().query.filter_by(version=1).all() + stmt = select(Service.get_history_model()).where( + Service.get_history_model().version == 1 + ) + services = db.session.execute(stmt).scalars().all() for s in services: - created_by_user = User.query.filter_by(id=s.created_by_id).first() + stmt = select(User).where(User.id == s.created_by_id) + created_by_user = db.session.execute(stmt).scalars().first() organization = dao_get_organization_by_email_address( created_by_user.email_address ) @@ -467,15 +472,16 @@ def populate_go_live(file_name): @notify_command(name="fix-billable-units") def fix_billable_units(): - query = Notification.query.filter( + stmt = select(Notification).where( Notification.notification_type == NotificationType.SMS, Notification.status != NotificationStatus.CREATED, Notification.sent_at == None, # noqa Notification.billable_units == 0, Notification.key_type != KeyType.TEST, ) + all = db.session.execute(stmt).scalars().all() - for notification in query.all(): + for notification in all: template_model = dao_get_template_by_id( notification.template_id, notification.template_version ) @@ -490,9 +496,12 @@ def fix_billable_units(): f"Updating notification: {notification.id} with {template.fragment_count} billable_units" ) - Notification.query.filter(Notification.id == notification.id).update( - {"billable_units": template.fragment_count} + stmt 
= ( + update(Notification) + .where(Notification.id == notification.id) + .values({"billable_units": template.fragment_count}) ) + db.session.execute(stmt) db.session.commit() current_app.logger.info("End fix_billable_units") @@ -637,8 +646,8 @@ def populate_annual_billing_with_defaults(year, missing_services_only): This is useful to ensure all services start the new year with the correct annual billing. """ if missing_services_only: - active_services = ( - Service.query.filter(Service.active) + stmt = ( + select(Service) .outerjoin( AnnualBilling, and_( @@ -646,20 +655,18 @@ def populate_annual_billing_with_defaults(year, missing_services_only): AnnualBilling.financial_year_start == year, ), ) - .filter(AnnualBilling.id == None) # noqa - .all() + .where(Service.active, AnnualBilling.id == None) # noqa ) + active_services = db.session.execute(stmt).scalars().all() else: - active_services = Service.query.filter(Service.active).all() + stmt = select(Service).where(Service.active) + active_services = db.session.execute(stmt).scalars().all() previous_year = year - 1 - services_with_zero_free_allowance = ( - db.session.query(AnnualBilling.service_id) - .filter( - AnnualBilling.financial_year_start == previous_year, - AnnualBilling.free_sms_fragment_limit == 0, - ) - .all() + stmt = select(AnnualBilling.id).where( + AnnualBilling.financial_year_start == previous_year, + AnnualBilling.free_sms_fragment_limit == 0, ) + services_with_zero_free_allowance = db.session.execute(stmt).scalars().all() for service in active_services: # If a service has free_sms_fragment_limit for the previous year @@ -750,7 +757,8 @@ def create_user_jwt(token): def _update_template(id, name, template_type, content, subject): - template = Template.query.filter_by(id=id).first() + stmt = select(Template).where(Template.id == id) + template = db.session.execute(stmt).scalars().first() if not template: template = Template(id=id) template.service_id = "d6aa2c68-a2d9-4437-ab19-3ae8eb202553" @@ -761,7 
+769,8 @@ def _update_template(id, name, template_type, content, subject): template.content = "\n".join(content) template.subject = subject - history = TemplateHistory.query.filter_by(id=id).first() + stmt = select(TemplateHistory).where(TemplateHistory.id == id) + history = db.session.execute(stmt).scalars().first() if not history: history = TemplateHistory(id=id) history.service_id = "d6aa2c68-a2d9-4437-ab19-3ae8eb202553" From 395ddd2c47ff8c8ee5774db15a13eae20bd2b62c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 12:17:59 -0700 Subject: [PATCH 12/45] fix more tests --- app/dao/service_data_retention_dao.py | 48 ++++++++++++++++----------- app/dao/service_guest_list_dao.py | 14 ++++---- app/dao/webauthn_credential_dao.py | 7 ++-- 3 files changed, 42 insertions(+), 27 deletions(-) diff --git a/app/dao/service_data_retention_dao.py b/app/dao/service_data_retention_dao.py index b95ca5720..cd2c1fd4b 100644 --- a/app/dao/service_data_retention_dao.py +++ b/app/dao/service_data_retention_dao.py @@ -1,3 +1,5 @@ +from sqlalchemy import select, update + from app import db from app.dao.dao_utils import autocommit from app.models import ServiceDataRetention @@ -5,29 +7,31 @@ def fetch_service_data_retention_by_id(service_id, data_retention_id): - data_retention = ServiceDataRetention.query.filter_by( - service_id=service_id, id=data_retention_id - ).first() - return data_retention + stmt = select(ServiceDataRetention).where( + ServiceDataRetention.service_id == service_id, + ServiceDataRetention.id == data_retention_id, + ) + return db.session.execute(stmt).scalars().first() def fetch_service_data_retention(service_id): - data_retention_list = ( - ServiceDataRetention.query.filter_by(service_id=service_id) + stmt = ( + select(ServiceDataRetention) + .where(ServiceDataRetention.service_id == service_id) .order_by( # in the order that models.notification_types are created (email, sms, letter) ServiceDataRetention.notification_type ) - 
.all() ) - return data_retention_list + return db.session.execute(stmt).scalars().all() def fetch_service_data_retention_by_notification_type(service_id, notification_type): - data_retention_list = ServiceDataRetention.query.filter_by( - service_id=service_id, notification_type=notification_type - ).first() - return data_retention_list + stmt = select(ServiceDataRetention).where( + ServiceDataRetention.service_id == service_id, + ServiceDataRetention.notification_type == notification_type, + ) + return db.session.execute(stmt).scalars().first() @autocommit @@ -46,16 +50,22 @@ def insert_service_data_retention(service_id, notification_type, days_of_retenti def update_service_data_retention( service_data_retention_id, service_id, days_of_retention ): - updated_count = ServiceDataRetention.query.filter( - ServiceDataRetention.id == service_data_retention_id, - ServiceDataRetention.service_id == service_id, - ).update({"days_of_retention": days_of_retention, "updated_at": utc_now()}) - return updated_count + stmt = ( + update(ServiceDataRetention) + .where( + ServiceDataRetention.id == service_data_retention_id, + ServiceDataRetention.service_id == service_id, + ) + .values({"days_of_retention": days_of_retention, "updated_at": utc_now()}) + ) + result = db.session.execute(stmt) + return result.rowcount def fetch_service_data_retention_for_all_services_by_notification_type( notification_type, ): - return ServiceDataRetention.query.filter( + stmt = select(ServiceDataRetention).where( ServiceDataRetention.notification_type == notification_type - ).all() + ) + return db.session.execute(stmt).scalars().all() diff --git a/app/dao/service_guest_list_dao.py b/app/dao/service_guest_list_dao.py index acd39703c..59d381a8b 100644 --- a/app/dao/service_guest_list_dao.py +++ b/app/dao/service_guest_list_dao.py @@ -1,11 +1,12 @@ +from sqlalchemy import delete, select + from app import db from app.models import ServiceGuestList def dao_fetch_service_guest_list(service_id): - return 
ServiceGuestList.query.filter( - ServiceGuestList.service_id == service_id - ).all() + stmt = select(ServiceGuestList).where(ServiceGuestList.service_id == service_id) + return db.session.execute(stmt).scalars().all() def dao_add_and_commit_guest_list_contacts(objs): @@ -14,6 +15,7 @@ def dao_add_and_commit_guest_list_contacts(objs): def dao_remove_service_guest_list(service_id): - return ServiceGuestList.query.filter( - ServiceGuestList.service_id == service_id - ).delete() + stmt = delete(ServiceGuestList).where(ServiceGuestList.service_id == service_id) + result = db.session.execute(stmt) + db.session.commit() + return result.rowcount diff --git a/app/dao/webauthn_credential_dao.py b/app/dao/webauthn_credential_dao.py index b34d3c014..4c7a0c888 100644 --- a/app/dao/webauthn_credential_dao.py +++ b/app/dao/webauthn_credential_dao.py @@ -1,13 +1,16 @@ +from sqlalchemy import select + from app import db from app.dao.dao_utils import autocommit from app.models import WebauthnCredential def dao_get_webauthn_credential_by_user_and_id(user_id, webauthn_credential_id): - return WebauthnCredential.query.filter( + stmt = select(WebauthnCredential).where( WebauthnCredential.user_id == user_id, WebauthnCredential.id == webauthn_credential_id, - ).one() + ) + return db.session.execute(stmt).scalars().one() @autocommit From 03312bfdc47c9ac6194b4a05944bcf4642bd2d28 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 13:09:30 -0700 Subject: [PATCH 13/45] fix more tests --- tests/app/dao/test_service_guest_list_dao.py | 10 +++- .../test_process_notification.py | 54 ++++++++++++------- tests/app/test_commands.py | 36 ++++++++----- 3 files changed, 67 insertions(+), 33 deletions(-) diff --git a/tests/app/dao/test_service_guest_list_dao.py b/tests/app/dao/test_service_guest_list_dao.py index 870c78bd8..021f42319 100644 --- a/tests/app/dao/test_service_guest_list_dao.py +++ b/tests/app/dao/test_service_guest_list_dao.py @@ -1,5 +1,8 @@ import uuid 
+from sqlalchemy import func, select + +from app import db from app.dao.service_guest_list_dao import ( dao_add_and_commit_guest_list_contacts, dao_fetch_service_guest_list, @@ -27,7 +30,8 @@ def test_add_and_commit_guest_list_contacts_saves_data(sample_service): dao_add_and_commit_guest_list_contacts([guest_list]) - db_contents = ServiceGuestList.query.all() + stmt = select(ServiceGuestList) + db_contents = db.session.execute(stmt).scalars().all() assert len(db_contents) == 1 assert db_contents[0].id == guest_list.id @@ -60,4 +64,6 @@ def test_remove_service_guest_list_does_not_commit( # since dao_remove_service_guest_list doesn't commit, we can still rollback its changes notify_db_session.rollback() - assert ServiceGuestList.query.count() == 1 + stmt = select(func.count()).select_from(ServiceGuestList) + count = db.session.execute(stmt).scalar() or 0 + assert count == 1 diff --git a/tests/app/notifications/test_process_notification.py b/tests/app/notifications/test_process_notification.py index d7caf5bb1..9f393b440 100644 --- a/tests/app/notifications/test_process_notification.py +++ b/tests/app/notifications/test_process_notification.py @@ -5,8 +5,10 @@ import pytest from boto3.exceptions import Boto3Error from freezegun import freeze_time +from sqlalchemy import func, select from sqlalchemy.exc import SQLAlchemyError +from app import db from app.enums import KeyType, NotificationType, ServicePermissionType, TemplateType from app.errors import BadRequestError from app.models import Notification, NotificationHistory @@ -67,12 +69,22 @@ def test_create_content_for_notification_allows_additional_personalisation( ) +def _get_notification_query_count(): + stmt = select(func.count()).select_from(Notification) + return db.session.execute(stmt).scalar() or 0 + + +def _get_notification_history_query_count(): + stmt = select(func.count()).select_from(NotificationHistory) + return db.session.execute(stmt).scalar() or 0 + + @freeze_time("2016-01-01 11:09:00.061258") def 
test_persist_notification_creates_and_save_to_db( sample_template, sample_api_key, sample_job ): - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 + assert _get_notification_query_count() == 0 + assert _get_notification_history_query_count() == 0 notification = persist_notification( template_id=sample_template.id, template_version=sample_template.version, @@ -114,8 +126,8 @@ def test_persist_notification_creates_and_save_to_db( def test_persist_notification_throws_exception_when_missing_template(sample_api_key): - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 + assert _get_notification_query_count() == 0 + assert _get_notification_history_query_count() == 0 with pytest.raises(SQLAlchemyError): persist_notification( template_id=None, @@ -127,14 +139,14 @@ def test_persist_notification_throws_exception_when_missing_template(sample_api_ api_key_id=sample_api_key.id, key_type=sample_api_key.key_type, ) - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 + assert _get_notification_query_count() == 0 + assert _get_notification_history_query_count() == 0 @freeze_time("2016-01-01 11:09:00.061258") def test_persist_notification_with_optionals(sample_job, sample_api_key): - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 + assert _get_notification_query_count() == 0 + assert _get_notification_history_query_count() == 0 n_id = uuid.uuid4() created_at = datetime.datetime(2016, 11, 11, 16, 8, 18) persist_notification( @@ -153,9 +165,10 @@ def test_persist_notification_with_optionals(sample_job, sample_api_key): notification_id=n_id, created_by_id=sample_job.created_by_id, ) - assert Notification.query.count() == 1 - assert NotificationHistory.query.count() == 0 - persisted_notification = Notification.query.all()[0] + assert _get_notification_query_count() == 1 + assert _get_notification_history_query_count() == 0 + stmt = 
select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.id == n_id assert persisted_notification.job_id == sample_job.id assert persisted_notification.job_row_number == 10 @@ -267,8 +280,8 @@ def test_send_notification_to_queue_throws_exception_deletes_notification( queue="send-sms-tasks", ) - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 0 + assert _get_notification_query_count() == 0 + assert _get_notification_history_query_count() == 0 @pytest.mark.parametrize( @@ -349,7 +362,8 @@ def test_persist_notification_with_international_info_stores_correct_info( job_row_number=10, client_reference="ref from client", ) - persisted_notification = Notification.query.all()[0] + stmt = select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.international is expected_international assert persisted_notification.phone_prefix == expected_prefix @@ -372,7 +386,8 @@ def test_persist_notification_with_international_info_does_not_store_for_email( job_row_number=10, client_reference="ref from client", ) - persisted_notification = Notification.query.all()[0] + stmt = select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.international is False assert persisted_notification.phone_prefix is None @@ -404,7 +419,8 @@ def test_persist_sms_notification_stores_normalised_number( key_type=sample_api_key.key_type, job_id=sample_job.id, ) - persisted_notification = Notification.query.all()[0] + stmt = select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.to == "1" assert persisted_notification.normalised_to == "1" @@ -428,7 +444,8 @@ def test_persist_email_notification_stores_normalised_email( key_type=sample_api_key.key_type, job_id=sample_job.id, ) - persisted_notification = 
Notification.query.all()[0] + stmt = select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.to == "1" assert persisted_notification.normalised_to == "1" @@ -449,6 +466,7 @@ def test_persist_notification_with_billable_units_stores_correct_info(mocker): key_type=KeyType.NORMAL, billable_units=3, ) - persisted_notification = Notification.query.all()[0] + stmt = select(Notification) + persisted_notification = db.session.execute(stmt).scalars().all()[0] assert persisted_notification.billable_units == 3 diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 361de9a40..a273efe8d 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock, mock_open import pytest -from sqlalchemy import select +from sqlalchemy import func, select from app import db from app.commands import ( @@ -56,8 +56,13 @@ ) +def _get_user_query_count(): + stmt = select(func.count()).select_from(User) + return db.session.execute(stmt).scalar() or 0 + + def test_purge_functional_test_data(notify_db_session, notify_api): - orig_user_count = User.query.count() + orig_user_count = _get_user_query_count() notify_api.test_cli_runner().invoke( create_test_user, @@ -73,16 +78,16 @@ def test_purge_functional_test_data(notify_db_session, notify_api): ], ) - user_count = User.query.count() + user_count = _get_user_query_count() assert user_count == orig_user_count + 1 notify_api.test_cli_runner().invoke(purge_functional_test_data, ["-u", "somebody"]) # if the email address has a uuid, it is test data so it should be purged and there should be # zero users. Otherwise, it is real data so there should be one user. 
- assert User.query.count() == orig_user_count + assert _get_user_query_count() == orig_user_count def test_purge_functional_test_data_bad_mobile(notify_db_session, notify_api): - user_count = User.query.count() + user_count = _get_user_query_count() assert user_count == 0 # run the command command_response = notify_api.test_cli_runner().invoke( @@ -101,7 +106,7 @@ def test_purge_functional_test_data_bad_mobile(notify_db_session, notify_api): # The bad mobile phone number results in a bad parameter error, # leading to a system exit 2 and no entry made in db assert "SystemExit(2)" in str(command_response) - user_count = User.query.count() + user_count = _get_user_query_count() assert user_count == 0 @@ -136,8 +141,13 @@ def test_update_jobs_archived_flag(notify_db_session, notify_api): assert job.archived is True +def _get_organization_query_count(): + stmt = select(Organization) + return db.session.execute(stmt).scalar() or 0 + + def test_populate_organizations_from_file(notify_db_session, notify_api): - org_count = Organization.query.count() + org_count = _get_organization_query_count() assert org_count == 0 file_name = "./tests/app/orgs1.csv" @@ -152,7 +162,7 @@ def test_populate_organizations_from_file(notify_db_session, notify_api): os.remove(file_name) print(f"command_response = {command_response}") - org_count = Organization.query.count() + org_count = _get_organization_query_count() assert org_count == 1 @@ -161,10 +171,10 @@ def test_populate_organization_agreement_details_from_file( ): file_name = "./tests/app/orgs.csv" - org_count = Organization.query.count() + org_count = _get_organization_query_count() assert org_count == 0 create_organization() - org_count = Organization.query.count() + org_count = _get_organization_query_count() assert org_count == 1 org = Organization.query.one() @@ -183,7 +193,7 @@ def test_populate_organization_agreement_details_from_file( ) print(f"command_response = {command_response}") - org_count = Organization.query.count() + 
org_count = _get_organization_query_count() assert org_count == 1 org = Organization.query.one() assert org.agreement_signed_on_behalf_of_name == "bob" @@ -224,7 +234,7 @@ def test_bulk_invite_user_to_service( def test_create_test_user_command(notify_db_session, notify_api): # number of users before adding ours - user_count = User.query.count() + user_count = _get_user_query_count() # run the command notify_api.test_cli_runner().invoke( @@ -242,7 +252,7 @@ def test_create_test_user_command(notify_db_session, notify_api): ) # there should be one more user - assert User.query.count() == user_count + 1 + assert _get_user_query_count() == user_count + 1 # that user should be the one we added stmt = select(User).where(User.name == "Fake Personson") From 50a9730f0b312f4adf737a76a0f4fd73cba10fc3 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 13:25:16 -0700 Subject: [PATCH 14/45] fix more tests --- app/dao/service_guest_list_dao.py | 1 - tests/app/test_commands.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/app/dao/service_guest_list_dao.py b/app/dao/service_guest_list_dao.py index 59d381a8b..8e128a213 100644 --- a/app/dao/service_guest_list_dao.py +++ b/app/dao/service_guest_list_dao.py @@ -17,5 +17,4 @@ def dao_add_and_commit_guest_list_contacts(objs): def dao_remove_service_guest_list(service_id): stmt = delete(ServiceGuestList).where(ServiceGuestList.service_id == service_id) result = db.session.execute(stmt) - db.session.commit() return result.rowcount diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index a273efe8d..b4d9033b1 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -142,7 +142,7 @@ def test_update_jobs_archived_flag(notify_db_session, notify_api): def _get_organization_query_count(): - stmt = select(Organization) + stmt = select(func.count()).select_from(Organization) return db.session.execute(stmt).scalar() or 0 From 
4cb360ecb21de5d3e0bd8c91bf645dbd8fe61214 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 14:01:04 -0700 Subject: [PATCH 15/45] fix more --- tests/app/celery/test_tasks.py | 21 +++++++++++++-------- tests/app/dao/test_invited_user_dao.py | 17 +++++++++++------ tests/app/service/test_api_key_endpoints.py | 17 ++++++++++++----- tests/app/user/test_rest_verify.py | 12 +++++++++--- 4 files changed, 45 insertions(+), 22 deletions(-) diff --git a/tests/app/celery/test_tasks.py b/tests/app/celery/test_tasks.py index a0fd70584..5720b15f9 100644 --- a/tests/app/celery/test_tasks.py +++ b/tests/app/celery/test_tasks.py @@ -531,7 +531,12 @@ def test_should_not_save_sms_if_restricted_service_and_invalid_number( encryption.encrypt(notification), ) assert provider_tasks.deliver_sms.apply_async.called is False - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 + + +def _get_notification_query_count(): + stmt = select(func.count()).select_from(Notification) + return db.session.execute(stmt).scalar() or 0 def test_should_not_save_email_if_restricted_service_and_invalid_email_address( @@ -553,7 +558,7 @@ def test_should_not_save_email_if_restricted_service_and_invalid_email_address( encryption.encrypt(notification), ) - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker): @@ -593,7 +598,7 @@ def test_should_save_sms_template_to_and_persist_with_job_id(sample_job, mocker) def test_should_not_save_sms_if_team_key_and_recipient_not_in_team( notify_db_session, mocker ): - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 user = create_user(mobile_number="2028675309") service = create_service(user=user, restricted=True) template = create_template(service=service) @@ -611,7 +616,7 @@ def test_should_not_save_sms_if_team_key_and_recipient_not_in_team( 
encryption.encrypt(notification), ) assert provider_tasks.deliver_sms.apply_async.called is False - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 def test_should_use_email_template_and_persist( @@ -836,7 +841,7 @@ def test_save_sms_should_go_to_retry_queue_if_database_errors(sample_template, m assert provider_tasks.deliver_sms.apply_async.called is False tasks.save_sms.retry.assert_called_with(exc=expected_exception, queue="retry-tasks") - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 def test_save_email_should_go_to_retry_queue_if_database_errors( @@ -866,7 +871,7 @@ def test_save_email_should_go_to_retry_queue_if_database_errors( exc=expected_exception, queue="retry-tasks" ) - assert Notification.query.count() == 0 + assert _get_notification_query_count() == 0 def test_save_email_does_not_send_duplicate_and_does_not_put_in_retry_queue( @@ -888,7 +893,7 @@ def test_save_email_does_not_send_duplicate_and_does_not_put_in_retry_queue( notification_id, encryption.encrypt(json), ) - assert Notification.query.count() == 1 + assert _get_notification_query_count() == 1 assert not deliver_email.called assert not retry.called @@ -912,7 +917,7 @@ def test_save_sms_does_not_send_duplicate_and_does_not_put_in_retry_queue( notification_id, encryption.encrypt(json), ) - assert Notification.query.count() == 1 + assert _get_notification_query_count() == 1 assert not deliver_sms.called assert not retry.called diff --git a/tests/app/dao/test_invited_user_dao.py b/tests/app/dao/test_invited_user_dao.py index 247a3dfda..44fc23572 100644 --- a/tests/app/dao/test_invited_user_dao.py +++ b/tests/app/dao/test_invited_user_dao.py @@ -19,8 +19,13 @@ from tests.app.db import create_invited_user +def _get_invited_user_count(): + stmt = select(func.count()).select_from(InvitedUser) + return db.session.execute(stmt).scalar() or 0 + + def test_create_invited_user(notify_db_session, sample_service): - assert 
InvitedUser.query.count() == 0 + assert _get_invited_user_count() == 0 email_address = "invited_user@service.gov.uk" invite_from = sample_service.users[0] @@ -35,7 +40,7 @@ def test_create_invited_user(notify_db_session, sample_service): invited_user = InvitedUser(**data) save_invited_user(invited_user) - assert InvitedUser.query.count() == 1 + assert _get_invited_user_count() == 1 assert invited_user.email_address == email_address assert invited_user.from_user == invite_from permissions = invited_user.get_permissions() @@ -48,7 +53,7 @@ def test_create_invited_user(notify_db_session, sample_service): def test_create_invited_user_sets_default_folder_permissions_of_empty_list( sample_service, ): - assert InvitedUser.query.count() == 0 + assert _get_invited_user_count() == 0 invite_from = sample_service.users[0] data = { @@ -61,7 +66,7 @@ def test_create_invited_user_sets_default_folder_permissions_of_empty_list( invited_user = InvitedUser(**data) save_invited_user(invited_user) - assert InvitedUser.query.count() == 1 + assert _get_invited_user_count() == 1 assert invited_user.folder_permissions == [] @@ -109,12 +114,12 @@ def test_get_invited_users_for_service_that_has_no_invites( def test_save_invited_user_sets_status_to_cancelled( notify_db_session, sample_invited_user ): - assert InvitedUser.query.count() == 1 + assert _get_invited_user_count() == 1 saved = InvitedUser.query.get(sample_invited_user.id) assert saved.status == InvitedUserStatus.PENDING saved.status = InvitedUserStatus.CANCELLED save_invited_user(saved) - assert InvitedUser.query.count() == 1 + assert _get_invited_user_count() == 1 cancelled_invited_user = InvitedUser.query.get(sample_invited_user.id) assert cancelled_invited_user.status == InvitedUserStatus.CANCELLED diff --git a/tests/app/service/test_api_key_endpoints.py b/tests/app/service/test_api_key_endpoints.py index 8ca0e374d..09a964b3c 100644 --- a/tests/app/service/test_api_key_endpoints.py +++ 
b/tests/app/service/test_api_key_endpoints.py @@ -1,7 +1,9 @@ import json from flask import url_for +from sqlalchemy import func, select +from app import db from app.dao.api_key_dao import expire_api_key from app.enums import KeyType from app.models import ApiKey @@ -60,10 +62,15 @@ def test_create_api_key_without_key_type_rejects(client, sample_service): assert json_resp["message"] == {"key_type": ["Missing data for required field."]} +def _get_api_key_count(): + stmt = select(func.count()).select_from(ApiKey) + return db.session.execute(stmt).scalar() or 0 + + def test_revoke_should_expire_api_key_for_service(notify_api, sample_api_key): with notify_api.test_request_context(): with notify_api.test_client() as client: - assert ApiKey.query.count() == 1 + assert _get_api_key_count() == 1 auth_header = create_admin_authorization_header() response = client.post( url_for( @@ -83,7 +90,7 @@ def test_api_key_should_create_multiple_new_api_key_for_service( ): with notify_api.test_request_context(): with notify_api.test_client() as client: - assert ApiKey.query.count() == 0 + assert _get_api_key_count() == 0 data = { "name": "some secret name", "created_by": str(sample_service.created_by.id), @@ -96,7 +103,7 @@ def test_api_key_should_create_multiple_new_api_key_for_service( headers=[("Content-Type", "application/json"), auth_header], ) assert response.status_code == 201 - assert ApiKey.query.count() == 1 + assert _get_api_key_count() == 1 data["name"] = "another secret name" auth_header = create_admin_authorization_header() @@ -109,7 +116,7 @@ def test_api_key_should_create_multiple_new_api_key_for_service( assert json.loads(response.get_data(as_text=True)) != json.loads( response2.get_data(as_text=True) ) - assert ApiKey.query.count() == 2 + assert _get_api_key_count() == 2 def test_get_api_keys_should_return_all_keys_for_service(notify_api, sample_api_key): @@ -130,7 +137,7 @@ def test_get_api_keys_should_return_all_keys_for_service(notify_api, sample_api_ 
service_id=one_to_expire.service_id, api_key_id=one_to_expire.id ) - assert ApiKey.query.count() == 4 + assert _get_api_key_count() == 4 auth_header = create_admin_authorization_header() response = client.get( diff --git a/tests/app/user/test_rest_verify.py b/tests/app/user/test_rest_verify.py index ff74f6b57..d32d923bf 100644 --- a/tests/app/user/test_rest_verify.py +++ b/tests/app/user/test_rest_verify.py @@ -5,6 +5,7 @@ import pytest from flask import current_app, url_for from freezegun import freeze_time +from sqlalchemy import func, select import app.celery.tasks from app import db @@ -295,7 +296,7 @@ def test_send_sms_code_returns_204_when_too_many_codes_already_created( ) db.session.add(verify_code) db.session.commit() - assert VerifyCode.query.count() == 5 + assert _get_verify_code_count() == 5 auth_header = create_admin_authorization_header() resp = client.post( url_for( @@ -307,7 +308,12 @@ def test_send_sms_code_returns_204_when_too_many_codes_already_created( headers=[("Content-Type", "application/json"), auth_header], ) assert resp.status_code == 204 - assert VerifyCode.query.count() == 5 + assert _get_verify_code_count() == 5 + + +def _get_verify_code_count(): + stmt = select(func.count()).select_from(VerifyCode) + return db.session.execute(stmt).scalar() or 0 @pytest.mark.parametrize( @@ -341,7 +347,7 @@ def test_send_new_user_email_verification( notify_service = email_verification_template.service assert resp.status_code == 204 notification = Notification.query.first() - assert VerifyCode.query.count() == 0 + assert _get_verify_code_count() == 0 mocked.assert_called_once_with( ([str(notification.id)]), queue="notify-internal-tasks" ) From dd2921406f49de2cbc62c3a66674992331b543f1 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 14:22:08 -0700 Subject: [PATCH 16/45] fix more --- app/commands.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/commands.py b/app/commands.py index 
c88e2bcb3..b625df464 100644 --- a/app/commands.py +++ b/app/commands.py @@ -655,7 +655,8 @@ def populate_annual_billing_with_defaults(year, missing_services_only): AnnualBilling.financial_year_start == year, ), ) - .where(Service.active, AnnualBilling.id == None) # noqa + .where(Service.active) + .where(AnnualBilling.id == None) # noqa ) active_services = db.session.execute(stmt).scalars().all() else: From 873079609a9f910e46b906df2b64481ebd54b133 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 14:52:32 -0700 Subject: [PATCH 17/45] revert test to 1.4 --- tests/app/test_commands.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index b4d9033b1..4acb9eedf 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -372,11 +372,10 @@ def test_populate_annual_billing_with_defaults_sets_free_allowance_to_zero_if_pr populate_annual_billing_with_defaults, ["-y", 2022] ) - stmt = select(AnnualBilling).where( + results = AnnualBilling.query.filter( AnnualBilling.financial_year_start == 2022, AnnualBilling.service_id == service.id, - ) - results = db.session.execute(stmt).scalars().all() + ).all() assert len(results) == 1 assert results[0].free_sms_fragment_limit == 0 From 9bdc29b003560e06ea7d78d506ab656bdfe20ded Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Wed, 30 Oct 2024 15:04:30 -0700 Subject: [PATCH 18/45] revert test to 1.4 --- app/commands.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/app/commands.py b/app/commands.py index b625df464..4893b0056 100644 --- a/app/commands.py +++ b/app/commands.py @@ -646,8 +646,8 @@ def populate_annual_billing_with_defaults(year, missing_services_only): This is useful to ensure all services start the new year with the correct annual billing. 
""" if missing_services_only: - stmt = ( - select(Service) + active_services = ( + Service.query.filter(Service.active) .outerjoin( AnnualBilling, and_( @@ -655,19 +655,20 @@ def populate_annual_billing_with_defaults(year, missing_services_only): AnnualBilling.financial_year_start == year, ), ) - .where(Service.active) - .where(AnnualBilling.id == None) # noqa + .filter(AnnualBilling.id == None) # noqa + .all() ) - active_services = db.session.execute(stmt).scalars().all() else: - stmt = select(Service).where(Service.active) - active_services = db.session.execute(stmt).scalars().all() + active_services = Service.query.filter(Service.active).all() previous_year = year - 1 - stmt = select(AnnualBilling.id).where( - AnnualBilling.financial_year_start == previous_year, - AnnualBilling.free_sms_fragment_limit == 0, + services_with_zero_free_allowance = ( + db.session.query(AnnualBilling.service_id) + .filter( + AnnualBilling.financial_year_start == previous_year, + AnnualBilling.free_sms_fragment_limit == 0, + ) + .all() ) - services_with_zero_free_allowance = db.session.execute(stmt).scalars().all() for service in active_services: # If a service has free_sms_fragment_limit for the previous year From 331cc413892536cdf2282415f851a996c52351d9 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 09:17:49 -0700 Subject: [PATCH 19/45] fix more --- app/dao/invited_user_dao.py | 23 ++++++++++++++++------- app/dao/notifications_dao.py | 1 + app/dao/provider_details_dao.py | 7 ++++--- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/app/dao/invited_user_dao.py b/app/dao/invited_user_dao.py index a342f504d..631086f30 100644 --- a/app/dao/invited_user_dao.py +++ b/app/dao/invited_user_dao.py @@ -1,5 +1,7 @@ from datetime import timedelta +from sqlalchemy import select + from app import db from app.enums import InvitedUserStatus from app.models import InvitedUser @@ -12,30 +14,37 @@ def save_invited_user(invited_user): def 
get_invited_user_by_service_and_id(service_id, invited_user_id): - return InvitedUser.query.filter( + + stmt = select(InvitedUser).where( InvitedUser.service_id == service_id, InvitedUser.id == invited_user_id, - ).one() + ) + return db.session.execute(stmt).scalars().one() def get_expired_invite_by_service_and_id(service_id, invited_user_id): - return InvitedUser.query.filter( + stmt = select(InvitedUser).where( InvitedUser.service_id == service_id, InvitedUser.id == invited_user_id, InvitedUser.status == InvitedUserStatus.EXPIRED, - ).one() + ) + return db.session.execute(stmt).scalars().all() def get_invited_user_by_id(invited_user_id): - return InvitedUser.query.filter(InvitedUser.id == invited_user_id).one() + stmt = select(InvitedUser).where(InvitedUser.id == invited_user_id) + return db.session.execute(stmt).scalars().one() def get_expired_invited_users_for_service(service_id): - return InvitedUser.query.filter(InvitedUser.service_id == service_id).all() + # TODO why does this return all invited users? 
+ stmt = select(InvitedUser).where(InvitedUser.service_id == service_id) + return db.session.execute(stmt).scalars().all() def get_invited_users_for_service(service_id): - return InvitedUser.query.filter(InvitedUser.service_id == service_id).all() + stmt = select(InvitedUser).where(InvitedUser.service_id == service_id) + return db.session.execute(stmt).scalars().all() def expire_invitations_created_more_than_two_days_ago(): diff --git a/app/dao/notifications_dao.py b/app/dao/notifications_dao.py index 1d07473c1..cbde45d30 100644 --- a/app/dao/notifications_dao.py +++ b/app/dao/notifications_dao.py @@ -192,6 +192,7 @@ def get_notifications_for_job( ): if page_size is None: page_size = current_app.config["PAGE_SIZE"] + query = Notification.query.filter_by(service_id=service_id, job_id=job_id) query = _filter_query(query, filter_dict) return query.order_by(asc(Notification.job_row_number)).paginate( diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index b0ab48d09..fca8ebd73 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -1,7 +1,7 @@ from datetime import datetime from flask import current_app -from sqlalchemy import desc, func +from sqlalchemy import desc, func, select from app import db from app.dao.dao_utils import autocommit @@ -11,11 +11,12 @@ def get_provider_details_by_id(provider_details_id): - return ProviderDetails.query.get(provider_details_id) + return db.session.get(ProviderDetails, provider_details_id) def get_provider_details_by_identifier(identifier): - return ProviderDetails.query.filter_by(identifier=identifier).one() + stmt = select(ProviderDetails).where(identifier=identifier) + return db.session.execute(stmt).scalars().one() def get_alternative_sms_provider(identifier): From d28c1f7d85646a095d79ec63515f6b00e41197a1 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 09:37:48 -0700 Subject: [PATCH 20/45] fix more --- app/dao/provider_details_dao.py 
| 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index fca8ebd73..54f36b372 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -15,7 +15,7 @@ def get_provider_details_by_id(provider_details_id): def get_provider_details_by_identifier(identifier): - stmt = select(ProviderDetails).where(identifier=identifier) + stmt = select(ProviderDetails).where(ProviderDetails.identifier == identifier) return db.session.execute(stmt).scalars().one() @@ -26,12 +26,13 @@ def get_alternative_sms_provider(identifier): def dao_get_provider_versions(provider_id): - return ( - ProviderDetailsHistory.query.filter_by(id=provider_id) + stmt = ( + select(ProviderDetailsHistory) + .where(ProviderDetailsHistory.id == provider_id) .order_by(desc(ProviderDetailsHistory.version)) - .limit(100) # limit results instead of adding pagination - .all() ) + # limit results instead of adding pagination + return db.session.execute(stmt).limit(100).scalars().all() def _get_sms_providers_for_update(time_threshold): @@ -43,14 +44,15 @@ def _get_sms_providers_for_update(time_threshold): release the transaction in that case """ # get current priority of both providers - q = ( - ProviderDetails.query.filter( + stmt = ( + select(ProviderDetails) + .where( ProviderDetails.notification_type == NotificationType.SMS, ProviderDetails.active, ) .with_for_update() - .all() ) + q = db.session.execute(stmt).scalars().all() # if something updated recently, don't update again. 
If the updated_at is null, treat it as min time if any( @@ -73,7 +75,8 @@ def get_provider_details_by_notification_type( if supports_international: filters.append(ProviderDetails.supports_international == supports_international) - return ProviderDetails.query.filter(*filters).all() + stmt = select(ProviderDetails).where(*filters) + return db.session.execute(stmt).scalars().all() @autocommit From 6b6bc2b4e76f1c7e63ac3d92d40d953d73c3c326 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 10:03:15 -0700 Subject: [PATCH 21/45] fix more --- app/dao/invited_user_dao.py | 2 +- app/dao/provider_details_dao.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/app/dao/invited_user_dao.py b/app/dao/invited_user_dao.py index 631086f30..49f953e26 100644 --- a/app/dao/invited_user_dao.py +++ b/app/dao/invited_user_dao.py @@ -28,7 +28,7 @@ def get_expired_invite_by_service_and_id(service_id, invited_user_id): InvitedUser.id == invited_user_id, InvitedUser.status == InvitedUserStatus.EXPIRED, ) - return db.session.execute(stmt).scalars().all() + return db.session.execute(stmt).scalars().one() def get_invited_user_by_id(invited_user_id): diff --git a/app/dao/provider_details_dao.py b/app/dao/provider_details_dao.py index 54f36b372..1b094273b 100644 --- a/app/dao/provider_details_dao.py +++ b/app/dao/provider_details_dao.py @@ -30,9 +30,10 @@ def dao_get_provider_versions(provider_id): select(ProviderDetailsHistory) .where(ProviderDetailsHistory.id == provider_id) .order_by(desc(ProviderDetailsHistory.version)) + .limit(100) ) # limit results instead of adding pagination - return db.session.execute(stmt).limit(100).scalars().all() + return db.session.execute(stmt).scalars().all() def _get_sms_providers_for_update(time_threshold): From bc7180185b73263b27870ec319ba834f17ccdbd6 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 11:32:27 -0700 Subject: [PATCH 22/45] fix more --- .ds.baseline | 6 
+++--- app/celery/scheduled_tasks.py | 11 ++++++---- app/commands.py | 10 ++++++---- ...t_notification_dao_delete_notifications.py | 13 +++++++++--- tests/app/dao/test_events_dao.py | 11 ++++++++-- tests/app/service/test_sender.py | 10 ++++++++-- .../test_template_folder_rest.py | 10 ++++++++-- tests/app/test_commands.py | 11 ++++++---- tests/app/user/test_rest.py | 20 +++++++++++++++---- 9 files changed, 74 insertions(+), 28 deletions(-) diff --git a/.ds.baseline b/.ds.baseline index 41a911ddd..eb730eedd 100644 --- a/.ds.baseline +++ b/.ds.baseline @@ -341,7 +341,7 @@ "filename": "tests/app/user/test_rest.py", "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", "is_verified": false, - "line_number": 106, + "line_number": 108, "is_secret": false }, { @@ -349,7 +349,7 @@ "filename": "tests/app/user/test_rest.py", "hashed_secret": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", "is_verified": false, - "line_number": 810, + "line_number": 822, "is_secret": false } ], @@ -384,5 +384,5 @@ } ] }, - "generated_at": "2024-10-30T18:15:03Z" + "generated_at": "2024-10-31T18:32:23Z" } diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 3597bdbb7..504b77f56 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,10 +1,10 @@ from datetime import timedelta from flask import current_app -from sqlalchemy import between +from sqlalchemy import between, select from sqlalchemy.exc import SQLAlchemyError -from app import notify_celery, zendesk_client +from app import db, notify_celery, zendesk_client from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, process_incomplete_jobs, @@ -105,15 +105,18 @@ def check_job_status(): thirty_minutes_ago = utc_now() - timedelta(minutes=30) thirty_five_minutes_ago = utc_now() - timedelta(minutes=35) - incomplete_in_progress_jobs = Job.query.filter( + stmt = select(Job).where( Job.job_status == JobStatus.IN_PROGRESS, between(Job.processing_started, 
thirty_five_minutes_ago, thirty_minutes_ago), ) - incomplete_pending_jobs = Job.query.filter( + incomplete_in_progress_jobs = db.session.execute(stmt).scalars().all() + + stmt = select(Job).where( Job.job_status == JobStatus.PENDING, Job.scheduled_for.isnot(None), between(Job.scheduled_for, thirty_five_minutes_ago, thirty_minutes_ago), ) + incomplete_pending_jobs = db.session.execute(stmt).scalars().all() jobs_not_complete_after_30_minutes = ( incomplete_in_progress_jobs.union(incomplete_pending_jobs) diff --git a/app/commands.py b/app/commands.py index 4893b0056..79bd3192d 100644 --- a/app/commands.py +++ b/app/commands.py @@ -646,8 +646,9 @@ def populate_annual_billing_with_defaults(year, missing_services_only): This is useful to ensure all services start the new year with the correct annual billing. """ if missing_services_only: - active_services = ( - Service.query.filter(Service.active) + stmt = ( + select(Service) + .where(Service.active) .outerjoin( AnnualBilling, and_( @@ -656,10 +657,11 @@ def populate_annual_billing_with_defaults(year, missing_services_only): ), ) .filter(AnnualBilling.id == None) # noqa - .all() ) + active_services = db.session.execute(stmt).scalars().all() else: - active_services = Service.query.filter(Service.active).all() + stmt = select(Service).where(Service.active) + active_services = db.session.execute(stmt).scalars().all() previous_year = year - 1 services_with_zero_free_allowance = ( db.session.query(AnnualBilling.service_id) diff --git a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py index 45d8958d1..fbe365e00 100644 --- a/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py +++ b/tests/app/dao/notification_dao/test_notification_dao_delete_notifications.py @@ -42,12 +42,17 @@ def test_move_notifications_does_nothing_if_notification_history_row_already_exi 1, ) - assert Notification.query.count() 
== 0 + assert _get_notification_count() == 0 history = NotificationHistory.query.all() assert len(history) == 1 assert history[0].status == NotificationStatus.DELIVERED +def _get_notification_count(): + stmt = select(func.count()).select_from(Notification) + return db.session.execute(stmt).scalar() or 0 + + def test_move_notifications_only_moves_notifications_older_than_provided_timestamp( sample_template, ): @@ -172,8 +177,10 @@ def test_move_notifications_just_deletes_test_key_notifications(sample_template) assert result == 2 - assert Notification.query.count() == 0 - assert NotificationHistory.query.count() == 2 + assert _get_notification_count() == 0 + stmt = select(func.count()).select_from(NotificationHistory) + count = db.session.execute(stmt).scalar() or 0 + assert count == 2 stmt = ( select(func.count()) .select_from(NotificationHistory) diff --git a/tests/app/dao/test_events_dao.py b/tests/app/dao/test_events_dao.py index 2647aafcb..60c977af6 100644 --- a/tests/app/dao/test_events_dao.py +++ b/tests/app/dao/test_events_dao.py @@ -1,9 +1,14 @@ +from sqlalchemy import func, select + +from app import db from app.dao.events_dao import dao_create_event from app.models import Event def test_create_event(notify_db_session): - assert Event.query.count() == 0 + stmt = select(func.count()).select_from(Event) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 data = { "event_type": "sucessful_login", "data": {"something": "random", "in_fact": "could be anything"}, @@ -12,6 +17,8 @@ def test_create_event(notify_db_session): event = Event(**data) dao_create_event(event) - assert Event.query.count() == 1 + stmt = select(func.count()).select_from(Event) + count = db.session.execute(stmt).scalar() or 0 + assert count == 1 event_from_db = Event.query.first() assert event == event_from_db diff --git a/tests/app/service/test_sender.py b/tests/app/service/test_sender.py index caae265c8..4b9c10ee1 100644 --- a/tests/app/service/test_sender.py +++ 
b/tests/app/service/test_sender.py @@ -1,6 +1,8 @@ import pytest from flask import current_app +from sqlalchemy import func, select +from app import db from app.dao.services_dao import dao_add_user_to_service from app.enums import NotificationType, TemplateType from app.models import Notification @@ -23,7 +25,9 @@ def test_send_notification_to_service_users_persists_notifications_correctly( notification = Notification.query.one() - assert Notification.query.count() == 1 + stmt = select(func.count()).select_from(Notification) + count = db.session.execute(stmt).scalar() or 0 + assert count == 1 assert notification.to == "1" assert str(notification.service_id) == current_app.config["NOTIFY_SERVICE_ID"] assert notification.template.id == template.id @@ -89,4 +93,6 @@ def test_send_notification_to_service_users_sends_to_active_users_only( send_notification_to_service_users(service_id=service.id, template_id=template.id) - assert Notification.query.count() == 2 + stmt = select(func.count()).select_from(Notification) + count = db.session.execute(stmt).scalar() or 0 + assert count == 2 diff --git a/tests/app/template_folder/test_template_folder_rest.py b/tests/app/template_folder/test_template_folder_rest.py index 6461ad3df..3bd2b4ee9 100644 --- a/tests/app/template_folder/test_template_folder_rest.py +++ b/tests/app/template_folder/test_template_folder_rest.py @@ -1,7 +1,9 @@ import uuid import pytest +from sqlalchemy import func, select +from app import db from app.dao.service_user_dao import dao_get_service_user from app.models import TemplateFolder from tests.app.db import ( @@ -286,7 +288,9 @@ def test_delete_template_folder_fails_if_folder_has_subfolders( assert resp == {"result": "error", "message": "Folder is not empty"} - assert TemplateFolder.query.count() == 2 + stmt = select(func.count()).select_from(TemplateFolder) + count = db.session.execute(stmt).scalar() or 0 + assert count == 2 def test_delete_template_folder_fails_if_folder_contains_templates( @@ -304,7 
+308,9 @@ def test_delete_template_folder_fails_if_folder_contains_templates( assert resp == {"result": "error", "message": "Folder is not empty"} - assert TemplateFolder.query.count() == 1 + stmt = select(func.count()).select_from(TemplateFolder) + count = db.session.execute(stmt).scalar() or 0 + assert count == 1 @pytest.mark.parametrize( diff --git a/tests/app/test_commands.py b/tests/app/test_commands.py index 4acb9eedf..e4a27c0e2 100644 --- a/tests/app/test_commands.py +++ b/tests/app/test_commands.py @@ -414,17 +414,20 @@ def test_create_service_command(notify_db_session, notify_api): user = User.query.first() - service_count = Service.query.count() + stmt = select(func.count()).select_from(Service) + service_count = db.session.execute(stmt).scalar() or 0 # run the command - result = notify_api.test_cli_runner().invoke( + notify_api.test_cli_runner().invoke( create_new_service, ["-e", "somebody@fake.gov", "-n", "Fake Service", "-c", user.id], ) - print(result) # there should be one more service - assert Service.query.count() == service_count + 1 + + stmt = select(func.count()).select_from(Service) + count = db.session.execute(stmt).scalar() or 0 + assert count == service_count + 1 # that service should be the one we added stmt = select(Service).where(Service.name == "Fake Service") diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 4e064ca8e..399b708c6 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -6,7 +6,9 @@ import pytest from flask import current_app from freezegun import freeze_time +from sqlalchemy import func, select +from app import db from app.dao.service_user_dao import dao_get_service_user, dao_update_service_user from app.enums import AuthType, KeyType, NotificationType, PermissionType from app.models import Notification, Permission, User @@ -153,12 +155,17 @@ def test_post_user_missing_attribute_email(admin_request, notify_db_session): } json_resp = admin_request.post("user.create_user", 
_data=data, _expected_status=400) - assert User.query.count() == 0 + assert _get_user_count() == 0 assert {"email_address": ["Missing data for required field."]} == json_resp[ "message" ] +def _get_user_count(): + stmt = select(func.count()).select_from(User) + return db.session.execute(stmt).scalar() or 0 + + def test_create_user_missing_attribute_password(admin_request, notify_db_session): """ Tests POST endpoint '/' missing attribute password. @@ -174,7 +181,7 @@ def test_create_user_missing_attribute_password(admin_request, notify_db_session "permissions": {}, } json_resp = admin_request.post("user.create_user", _data=data, _expected_status=400) - assert User.query.count() == 0 + assert _get_user_count() == 0 assert {"password": ["Missing data for required field."]} == json_resp["message"] @@ -512,8 +519,13 @@ def test_set_user_permissions_remove_old(admin_request, sample_user, sample_serv _expected_status=204, ) - query = Permission.query.filter_by(user=sample_user) - assert query.count() == 1 + query = ( + select(func.count()) + .select_from(Permission) + .where(Permission.user == sample_user) + ) + count = db.session.execute(query).scalar() or 0 + assert count == 1 assert query.first().permission == PermissionType.MANAGE_SETTINGS From c33c7a5058640deb9ade13299b5608952b7cd5bb Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 11:48:08 -0700 Subject: [PATCH 23/45] fix more --- .ds.baseline | 4 ++-- app/celery/scheduled_tasks.py | 15 +++++++-------- tests/app/user/test_rest.py | 14 +++++++++----- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/.ds.baseline b/.ds.baseline index eb730eedd..bcae9f1e8 100644 --- a/.ds.baseline +++ b/.ds.baseline @@ -349,7 +349,7 @@ "filename": "tests/app/user/test_rest.py", "hashed_secret": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", "is_verified": false, - "line_number": 822, + "line_number": 826, "is_secret": false } ], @@ -384,5 +384,5 @@ } ] }, - "generated_at": 
"2024-10-31T18:32:23Z" + "generated_at": "2024-10-31T18:48:03Z" } diff --git a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 504b77f56..86d801f88 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -105,24 +105,23 @@ def check_job_status(): thirty_minutes_ago = utc_now() - timedelta(minutes=30) thirty_five_minutes_ago = utc_now() - timedelta(minutes=35) - stmt = select(Job).where( + incomplete_in_progress_jobs = select(Job).where( Job.job_status == JobStatus.IN_PROGRESS, between(Job.processing_started, thirty_five_minutes_ago, thirty_minutes_ago), ) - incomplete_in_progress_jobs = db.session.execute(stmt).scalars().all() + # incomplete_in_progress_jobs = db.session.execute(stmt).scalars().all() - stmt = select(Job).where( + incomplete_pending_jobs = select(Job).where( Job.job_status == JobStatus.PENDING, Job.scheduled_for.isnot(None), between(Job.scheduled_for, thirty_five_minutes_ago, thirty_minutes_ago), ) - incomplete_pending_jobs = db.session.execute(stmt).scalars().all() + # incomplete_pending_jobs = db.session.execute(stmt).scalars().all() - jobs_not_complete_after_30_minutes = ( - incomplete_in_progress_jobs.union(incomplete_pending_jobs) - .order_by(Job.processing_started, Job.scheduled_for) - .all() + stmt = incomplete_in_progress_jobs.union(incomplete_pending_jobs).order_by( + Job.processing_started, Job.scheduled_for ) + jobs_not_complete_after_30_minutes = db.session.execute(stmt).scalars().all() # temporarily mark them as ERROR so that they don't get picked up by future check_job_status tasks # if they haven't been re-processed in time. 
diff --git a/tests/app/user/test_rest.py b/tests/app/user/test_rest.py index 399b708c6..f1ea5041b 100644 --- a/tests/app/user/test_rest.py +++ b/tests/app/user/test_rest.py @@ -336,7 +336,8 @@ def test_post_user_attribute_with_updated_by_sends_notification_to_international _data=update_dict, ) - notification = Notification.query.first() + stmt = select(Notification) + notification = db.session.execute(stmt).scalars().first() assert ( notification.reply_to_text == current_app.config["NOTIFY_INTERNATIONAL_SMS_SENDER"] @@ -526,7 +527,9 @@ def test_set_user_permissions_remove_old(admin_request, sample_user, sample_serv ) count = db.session.execute(query).scalar() or 0 assert count == 1 - assert query.first().permission == PermissionType.MANAGE_SETTINGS + query = select(Permission).where(Permission.user == sample_user) + first_permission = db.session.execute(query).scalars().first() + assert first_permission.permission == PermissionType.MANAGE_SETTINGS def test_set_user_folder_permissions(admin_request, sample_user, sample_service): @@ -658,7 +661,8 @@ def test_send_already_registered_email( _expected_status=204, ) - notification = Notification.query.first() + stmt = select(Notification) + notification = db.session.execute(stmt).scalars().first() mocked.assert_called_once_with( ([str(notification.id)]), queue="notify-internal-tasks" ) @@ -696,8 +700,8 @@ def test_send_user_confirm_new_email_returns_204( _data=data, _expected_status=204, ) - - notification = Notification.query.first() + stmt = select(Notification) + notification = db.session.execute(stmt).scalars().first() mocked.assert_called_once_with( ([str(notification.id)]), queue="notify-internal-tasks" ) From 3dd21705b88943369c0042c3994e39d6cd56b108 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 12:00:42 -0700 Subject: [PATCH 24/45] fix more --- app/celery/scheduled_tasks.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git 
a/app/celery/scheduled_tasks.py b/app/celery/scheduled_tasks.py index 86d801f88..3597bdbb7 100644 --- a/app/celery/scheduled_tasks.py +++ b/app/celery/scheduled_tasks.py @@ -1,10 +1,10 @@ from datetime import timedelta from flask import current_app -from sqlalchemy import between, select +from sqlalchemy import between from sqlalchemy.exc import SQLAlchemyError -from app import db, notify_celery, zendesk_client +from app import notify_celery, zendesk_client from app.celery.tasks import ( get_recipient_csv_and_template_and_sender_id, process_incomplete_jobs, @@ -105,23 +105,21 @@ def check_job_status(): thirty_minutes_ago = utc_now() - timedelta(minutes=30) thirty_five_minutes_ago = utc_now() - timedelta(minutes=35) - incomplete_in_progress_jobs = select(Job).where( + incomplete_in_progress_jobs = Job.query.filter( Job.job_status == JobStatus.IN_PROGRESS, between(Job.processing_started, thirty_five_minutes_ago, thirty_minutes_ago), ) - # incomplete_in_progress_jobs = db.session.execute(stmt).scalars().all() - - incomplete_pending_jobs = select(Job).where( + incomplete_pending_jobs = Job.query.filter( Job.job_status == JobStatus.PENDING, Job.scheduled_for.isnot(None), between(Job.scheduled_for, thirty_five_minutes_ago, thirty_minutes_ago), ) - # incomplete_pending_jobs = db.session.execute(stmt).scalars().all() - stmt = incomplete_in_progress_jobs.union(incomplete_pending_jobs).order_by( - Job.processing_started, Job.scheduled_for + jobs_not_complete_after_30_minutes = ( + incomplete_in_progress_jobs.union(incomplete_pending_jobs) + .order_by(Job.processing_started, Job.scheduled_for) + .all() ) - jobs_not_complete_after_30_minutes = db.session.execute(stmt).scalars().all() # temporarily mark them as ERROR so that they don't get picked up by future check_job_status tasks # if they haven't been re-processed in time. 
From 78ac1ee094004b3cef75b598f81f540fd78a8158 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 14:25:35 -0700 Subject: [PATCH 25/45] fix more --- .ds.baseline | 6 ++--- tests/app/celery/test_reporting_tasks.py | 23 ++++++++++++------- .../test_receive_notification.py | 11 +++++++-- .../test_send_notification.py | 11 +++++++-- 4 files changed, 36 insertions(+), 15 deletions(-) diff --git a/.ds.baseline b/.ds.baseline index bcae9f1e8..8aaa131c5 100644 --- a/.ds.baseline +++ b/.ds.baseline @@ -277,7 +277,7 @@ "filename": "tests/app/notifications/test_receive_notification.py", "hashed_secret": "913a73b565c8e2c8ed94497580f619397709b8b6", "is_verified": false, - "line_number": 24, + "line_number": 26, "is_secret": false }, { @@ -285,7 +285,7 @@ "filename": "tests/app/notifications/test_receive_notification.py", "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", "is_verified": false, - "line_number": 54, + "line_number": 56, "is_secret": false } ], @@ -384,5 +384,5 @@ } ] }, - "generated_at": "2024-10-31T18:48:03Z" + "generated_at": "2024-10-31T21:25:32Z" } diff --git a/tests/app/celery/test_reporting_tasks.py b/tests/app/celery/test_reporting_tasks.py index a32f68fc3..124038d48 100644 --- a/tests/app/celery/test_reporting_tasks.py +++ b/tests/app/celery/test_reporting_tasks.py @@ -4,7 +4,9 @@ import pytest from freezegun import freeze_time +from sqlalchemy import select +from app import db from app.celery.reporting_tasks import ( create_nightly_billing, create_nightly_billing_for_day, @@ -132,11 +134,11 @@ def test_create_nightly_billing_for_day_checks_history( status=NotificationStatus.DELIVERED, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day(str(yesterday.date())) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 1 record = records[0] @@ -144,6 +146,11 @@ def 
test_create_nightly_billing_for_day_checks_history( assert record.notifications_sent == 2 +def _get_fact_billing_records(): + stmt = select(FactBilling) + return db.session.execute(stmt).scalars().all() + + @pytest.mark.parametrize( "second_rate, records_num, billable_units, multiplier", [(1.0, 1, 2, [1]), (2.0, 2, 1, [1, 2])], @@ -181,7 +188,7 @@ def test_create_nightly_billing_for_day_sms_rate_multiplier( billable_units=1, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day(str(yesterday.date())) @@ -221,7 +228,7 @@ def test_create_nightly_billing_for_day_different_templates( billable_units=0, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day(str(yesterday.date())) @@ -265,7 +272,7 @@ def test_create_nightly_billing_for_day_same_sent_by( billable_units=1, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day(str(yesterday.date())) @@ -296,11 +303,11 @@ def test_create_nightly_billing_for_day_null_sent_by_sms( billable_units=1, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day(str(yesterday.date())) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 1 record = records[0] @@ -384,7 +391,7 @@ def test_create_nightly_billing_for_day_update_when_record_exists( billable_units=1, ) - records = FactBilling.query.all() + records = _get_fact_billing_records() assert len(records) == 0 create_nightly_billing_for_day("2018-01-14") diff --git a/tests/app/notifications/test_receive_notification.py b/tests/app/notifications/test_receive_notification.py index c95088803..e13b8d82e 100644 --- a/tests/app/notifications/test_receive_notification.py +++ b/tests/app/notifications/test_receive_notification.py @@ 
-4,7 +4,9 @@ import pytest from flask import json +from sqlalchemy import func, select +from app import db from app.enums import ServicePermissionType from app.models import InboundSms from app.notifications.receive_notifications import ( @@ -99,7 +101,9 @@ def test_receive_notification_from_sns_without_permissions_does_not_persist( parsed_response = json.loads(response.get_data(as_text=True)) assert parsed_response["result"] == "success" - assert InboundSms.query.count() == 0 + stmt = select(func.count()).select_from(InboundSms) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 assert mocked.called is False @@ -285,7 +289,10 @@ def test_receive_notification_error_if_not_single_matching_service( # we still return 'RECEIVED' to MMG assert response.status_code == 200 assert response.get_data(as_text=True) == "RECEIVED" - assert InboundSms.query.count() == 0 + + stmt = select(func.count()).select_from(InboundSms) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 @pytest.mark.skip(reason="Need to implement inbound SNS tests. 
Body here from MMG") diff --git a/tests/app/service/send_notification/test_send_notification.py b/tests/app/service/send_notification/test_send_notification.py index dcd6cc8e7..fd37f7592 100644 --- a/tests/app/service/send_notification/test_send_notification.py +++ b/tests/app/service/send_notification/test_send_notification.py @@ -5,8 +5,10 @@ from flask import current_app, json from freezegun import freeze_time from notifications_python_client.authentication import create_jwt_token +from sqlalchemy import func, select import app +from app import db from app.dao import notifications_dao from app.dao.api_key_dao import save_model_api_key from app.dao.services_dao import dao_update_service @@ -883,7 +885,9 @@ def test_should_not_persist_notification_or_send_email_if_simulated_email( assert response.status_code == 201 apply_async.assert_not_called() - assert Notification.query.count() == 0 + stmt = select(func.count()).select_from(Notification) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 @pytest.mark.parametrize("to_sms", ["+14254147755", "+14254147167"]) @@ -906,7 +910,10 @@ def test_should_not_persist_notification_or_send_sms_if_simulated_number( assert response.status_code == 201 apply_async.assert_not_called() - assert Notification.query.count() == 0 + + stmt = select(func.count()).select_from(Notification) + count = db.session.execute(stmt).scalar() or 0 + assert count == 0 @pytest.mark.parametrize("key_type", [KeyType.NORMAL, KeyType.TEAM]) From c5b227403ea5b3c40be5e7758d9bfff1c4bdc070 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Thu, 31 Oct 2024 14:38:07 -0700 Subject: [PATCH 26/45] fix more --- tests/app/organization/test_rest.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/tests/app/organization/test_rest.py b/tests/app/organization/test_rest.py index a9d7db135..1d521ca9c 100644 --- a/tests/app/organization/test_rest.py +++ 
b/tests/app/organization/test_rest.py @@ -4,8 +4,10 @@ import pytest from flask import current_app from freezegun import freeze_time +from sqlalchemy import select from sqlalchemy.exc import SQLAlchemyError +from app import db from app.dao.organization_dao import ( dao_add_service_to_organization, dao_add_user_to_organization, @@ -175,7 +177,7 @@ def test_post_create_organization(admin_request, notify_db_session): "organization.create_organization", _data=data, _expected_status=201 ) - organizations = Organization.query.all() + organizations = _get_organizations() assert data["name"] == response["name"] assert data["active"] == response["active"] @@ -186,6 +188,11 @@ def test_post_create_organization(admin_request, notify_db_session): assert organizations[0].email_branding_id is None +def _get_organizations(): + stmt = select(Organization) + return db.session.execute(stmt).scalars().all() + + @pytest.mark.parametrize("org_type", ["nhs_central", "nhs_local", "nhs_gp"]) @pytest.mark.skip(reason="Update for TTS") def test_post_create_organization_sets_default_nhs_branding_for_nhs_orgs( @@ -201,7 +208,7 @@ def test_post_create_organization_sets_default_nhs_branding_for_nhs_orgs( "organization.create_organization", _data=data, _expected_status=201 ) - organizations = Organization.query.all() + organizations = _get_organizations() assert len(organizations) == 1 assert organizations[0].email_branding_id == uuid.UUID( @@ -212,7 +219,7 @@ def test_post_create_organization_sets_default_nhs_branding_for_nhs_orgs( def test_post_create_organization_existing_name_raises_400( admin_request, sample_organization ): - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 data = { @@ -225,14 +232,14 @@ def test_post_create_organization_existing_name_raises_400( "organization.create_organization", _data=data, _expected_status=400 ) - organization = Organization.query.all() + organization = _get_organizations() assert 
len(organization) == 1 assert response["message"] == "Organization name already exists" def test_post_create_organization_works(admin_request, sample_organization): - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 data = { @@ -245,7 +252,7 @@ def test_post_create_organization_works(admin_request, sample_organization): "organization.create_organization", _data=data, _expected_status=201 ) - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 2 @@ -310,7 +317,7 @@ def test_post_update_organization_updates_fields( _expected_status=204, ) - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 assert organization[0].id == org.id @@ -343,7 +350,7 @@ def test_post_update_organization_updates_domains( _expected_status=204, ) - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 assert [domain.domain for domain in organization[0].domains] == domain_list @@ -383,7 +390,7 @@ def test_post_update_organization_to_nhs_type_updates_branding_if_none_present( _expected_status=204, ) - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 assert organization[0].id == org.id @@ -413,7 +420,7 @@ def test_post_update_organization_to_nhs_type_does_not_update_branding_if_defaul _expected_status=204, ) - organization = Organization.query.all() + organization = _get_organizations() assert len(organization) == 1 assert organization[0].id == org.id @@ -471,7 +478,7 @@ def test_post_update_organization_gives_404_status_if_org_does_not_exist( _expected_status=404, ) - organization = Organization.query.all() + organization = _get_organizations() assert not organization From e80029e5f0ba9cc6429191b823fb455fbc6de55d Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Fri, 25 Oct 2024 16:29:34 -0400 Subject: [PATCH 27/45] 
Properly handling and validating the state for login.gov Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index e7d0d4b20..26718a35f 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -32,7 +32,7 @@ register_errors(service_invite) -def _create_service_invite(invited_user, nonce): +def _create_service_invite(invited_user, nonce, state): template_id = current_app.config["INVITATION_EMAIL_TEMPLATE_ID"] @@ -58,7 +58,7 @@ def _create_service_invite(invited_user, nonce): user_data_url_safe = get_user_data_url_safe(data) - url = url.replace("STATE", user_data_url_safe) + url = url.replace("STATE", state) personalisation = { "user_name": invited_user.from_user.name, @@ -94,11 +94,16 @@ def create_invited_user(service_id): except KeyError: current_app.logger.exception("nonce not found in submitted data.") raise + try: + state = request_json.pop("state") + except KeyError: + current_app.logger.exception("state not found in submitted data.") + raise invited_user = invited_user_schema.load(request_json) save_invited_user(invited_user) - _create_service_invite(invited_user, nonce) + _create_service_invite(invited_user, nonce, state) return jsonify(data=invited_user_schema.dump(invited_user)), 201 From c800fbc2b02a753250298431c0022d9278d675dd Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Fri, 25 Oct 2024 16:33:24 -0400 Subject: [PATCH 28/45] Removing unused code. Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 26718a35f..bd600ec88 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -56,8 +56,6 @@ def _create_service_invite(invited_user, nonce, state): url = url.replace("NONCE", nonce) # handed from data sent from admin. 
- user_data_url_safe = get_user_data_url_safe(data) - url = url.replace("STATE", state) personalisation = { @@ -216,9 +214,3 @@ def validate_service_invitation_token(token): invited_user = get_invited_user_by_id(invited_user_id) return jsonify(data=invited_user_schema.dump(invited_user)), 200 - - -def get_user_data_url_safe(data): - data = json.dumps(data) - data = base64.b64encode(data.encode("utf8")) - return data.decode("utf8") From 123aa7129be3be7badd274a4a16ed932ac41dff7 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Wed, 30 Oct 2024 09:07:08 -0400 Subject: [PATCH 29/45] Making state be validated. Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index bd600ec88..cc22201b5 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -1,4 +1,3 @@ -import base64 import json import os From 017406cbb6f57f271e13fb828aa76d13e9217001 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Fri, 1 Nov 2024 12:12:30 -0400 Subject: [PATCH 30/45] Fixing tests, and resend invites endpoint. 
Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 5 ++++- tests/app/service_invite/test_service_invite_rest.py | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index cc22201b5..21ee15ff7 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -155,6 +155,9 @@ def resend_service_invite(service_id, invited_user_id): invited_user_id=invited_user_id, ) + nonce = request.json["nonce"] + state = request.json["state"] + fetched.created_at = utc_now() fetched.status = InvitedUserStatus.PENDING @@ -163,7 +166,7 @@ def resend_service_invite(service_id, invited_user_id): save_invited_user(update_dict) - _create_service_invite(fetched, current_app.config["ADMIN_BASE_URL"]) + _create_service_invite(fetched, nonce, state) return jsonify(data=invited_user_schema.dump(fetched)), 200 diff --git a/tests/app/service_invite/test_service_invite_rest.py b/tests/app/service_invite/test_service_invite_rest.py index 5cea786f5..61b8b79e7 100644 --- a/tests/app/service_invite/test_service_invite_rest.py +++ b/tests/app/service_invite/test_service_invite_rest.py @@ -46,6 +46,7 @@ def test_create_invited_user( auth_type=AuthType.EMAIL, folder_permissions=["folder_1", "folder_2", "folder_3"], nonce="FakeNonce", + state="FakeState", **extra_args, ) @@ -110,6 +111,7 @@ def test_create_invited_user_without_auth_type( "permissions": "send_messages,manage_service,manage_api_keys", "folder_permissions": [], "nonce": "FakeNonce", + "state": "FakeState", } json_resp = admin_request.post( @@ -134,6 +136,7 @@ def test_create_invited_user_invalid_email(client, sample_service, mocker, fake_ "permissions": "send_messages,manage_service,manage_api_keys", "folder_permissions": [fake_uuid, fake_uuid], "nonce": "FakeNonce", + "state": "FakeState", } data = json.dumps(data) @@ -235,6 +238,7 @@ def test_resend_expired_invite( response = client.post( url, headers=[("Content-Type", "application/json"), 
auth_header], + data='{"nonce": "FakeNonce", "state": "FakeState"}', ) assert response.status_code == 200 From f95738a7634c8069c05645abb5cb7c7e858f2d96 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Wed, 6 Nov 2024 14:30:57 -0500 Subject: [PATCH 31/45] Getting all the needed data in place. Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 21ee15ff7..aa14e28ff 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -1,5 +1,7 @@ +import base64 import json import os +from urllib.parse import unquote from flask import Blueprint, current_app, jsonify, request from itsdangerous import BadData, SignatureExpired @@ -51,6 +53,9 @@ def _create_service_invite(invited_user, nonce, state): data["invited_user_id"] = str(invited_user.id) data["invited_user_email"] = invited_user.email_address + invite_redis_key = f"invite-data-{unquote(state)}" + redis_store.set(invite_redis_key, get_user_data_url_safe(data)) + url = os.environ["LOGIN_DOT_GOV_REGISTRATION_URL"] url = url.replace("NONCE", nonce) # handed from data sent from admin. @@ -216,3 +221,9 @@ def validate_service_invitation_token(token): invited_user = get_invited_user_by_id(invited_user_id) return jsonify(data=invited_user_schema.dump(invited_user)), 200 + + +def get_user_data_url_safe(data): + data = json.dumps(data) + data = base64.b64encode(data.encode("utf8")) + return data.decode("utf8") From 958c3cd61ee5a290fcfd1d235beeb4e5e5a4bf82 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Wed, 6 Nov 2024 16:03:50 -0500 Subject: [PATCH 32/45] Trying to get invites to flow correctly. 
Signed-off-by: Cliff Hill --- app/dao/users_dao.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index 690ecc7f9..bf0f21592 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -52,15 +52,22 @@ def get_login_gov_user(login_uuid, email_address): current_app.logger.exception("Error getting login.gov user") db.session.rollback() + print("In here instead!") return user # Remove this 1 July 2025, all users should have login.gov uuids by now stmt = select(User).filter(User.email_address.ilike(email_address)) user = db.session.execute(stmt).scalars().first() + print("*" * 80) + print(user) + if user: + print(f"login_uuid: {login_uuid}") save_user_attribute(user, {"login_uuid": login_uuid}) return user + print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% WTF") + return None From 70404a2a8b62235601e20f354ae40e670473ce01 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Thu, 7 Nov 2024 08:37:55 -0500 Subject: [PATCH 33/45] Invites are now working. 
Signed-off-by: Cliff Hill --- app/dao/users_dao.py | 7 ------- app/service/rest.py | 2 +- app/service_invite/rest.py | 6 ++++-- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/app/dao/users_dao.py b/app/dao/users_dao.py index bf0f21592..690ecc7f9 100644 --- a/app/dao/users_dao.py +++ b/app/dao/users_dao.py @@ -52,22 +52,15 @@ def get_login_gov_user(login_uuid, email_address): current_app.logger.exception("Error getting login.gov user") db.session.rollback() - print("In here instead!") return user # Remove this 1 July 2025, all users should have login.gov uuids by now stmt = select(User).filter(User.email_address.ilike(email_address)) user = db.session.execute(stmt).scalars().first() - print("*" * 80) - print(user) - if user: - print(f"login_uuid: {login_uuid}") save_user_attribute(user, {"login_uuid": login_uuid}) return user - print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% WTF") - return None diff --git a/app/service/rest.py b/app/service/rest.py index 6441b74b7..7dd614058 100644 --- a/app/service/rest.py +++ b/app/service/rest.py @@ -373,7 +373,7 @@ def add_user_to_service(service_id, user_id): service = dao_fetch_service_by_id(service_id) user = get_user_by_id(user_id=user_id) if user in service.users: - error = "User id: {} already part of service id: {}".format(user_id, service_id) + error = f"User id: {user_id} already part of service id: {service_id}" raise InvalidRequest(error, status_code=400) data = request.get_json() diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index aa14e28ff..1b25fe92c 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -87,6 +87,8 @@ def _create_service_invite(invited_user, nonce, state): ) send_notification_to_queue(saved_notification, queue=QueueNames.NOTIFY) + return data + @service_invite.route("/service//invite", methods=["POST"]) def create_invited_user(service_id): @@ -105,9 +107,9 @@ def create_invited_user(service_id): invited_user = 
invited_user_schema.load(request_json) save_invited_user(invited_user) - _create_service_invite(invited_user, nonce, state) + invite_data = _create_service_invite(invited_user, nonce, state) - return jsonify(data=invited_user_schema.dump(invited_user)), 201 + return jsonify(data=invited_user_schema.dump(invited_user), invite=invite_data), 201 @service_invite.route("/service//invite/expired", methods=["GET"]) From 15711430124ec39f6819e859cb9df58d4b066b79 Mon Sep 17 00:00:00 2001 From: Cliff Hill Date: Thu, 7 Nov 2024 09:56:09 -0500 Subject: [PATCH 34/45] Resend invites works. Signed-off-by: Cliff Hill --- app/service_invite/rest.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/app/service_invite/rest.py b/app/service_invite/rest.py index 1b25fe92c..38bc1c404 100644 --- a/app/service_invite/rest.py +++ b/app/service_invite/rest.py @@ -157,14 +157,23 @@ def resend_service_invite(service_id, invited_user_id): Note: This ignores the POST data entirely. 
""" + request_json = request.get_json() + try: + nonce = request_json.pop("nonce") + except KeyError: + current_app.logger.exception("nonce not found in submitted data.") + raise + try: + state = request_json.pop("state") + except KeyError: + current_app.logger.exception("state not found in submitted data.") + raise + fetched = get_expired_invite_by_service_and_id( service_id=service_id, invited_user_id=invited_user_id, ) - nonce = request.json["nonce"] - state = request.json["state"] - fetched.created_at = utc_now() fetched.status = InvitedUserStatus.PENDING @@ -173,9 +182,9 @@ def resend_service_invite(service_id, invited_user_id): save_invited_user(update_dict) - _create_service_invite(fetched, nonce, state) + invite_data = _create_service_invite(fetched, nonce, state) - return jsonify(data=invited_user_schema.dump(fetched)), 200 + return jsonify(data=invited_user_schema.dump(fetched), invite=invite_data), 200 def invited_user_url(invited_user_id, invite_link_host=None): From 6df7a218a7e6715e9b452ac0b6af8191032e5591 Mon Sep 17 00:00:00 2001 From: Andrew Shumway Date: Thu, 7 Nov 2024 09:45:56 -0700 Subject: [PATCH 35/45] Remove scalars from execute and add test coverage --- app/dao/services_dao.py | 2 +- tests/app/service/test_rest.py | 40 ++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/app/dao/services_dao.py b/app/dao/services_dao.py index 1f8956865..260008193 100644 --- a/app/dao/services_dao.py +++ b/app/dao/services_dao.py @@ -474,7 +474,7 @@ def dao_fetch_stats_for_service_from_days(service_id, start_date, end_date): func.date_trunc("day", NotificationAllTimeView.created_at), ) ) - return db.session.execute(stmt).scalars().all() + return db.session.execute(stmt).all() def dao_fetch_stats_for_service_from_days_for_user( diff --git a/tests/app/service/test_rest.py b/tests/app/service/test_rest.py index ecec87ec1..a5b22ddd3 100644 --- a/tests/app/service/test_rest.py +++ b/tests/app/service/test_rest.py @@ 
-3634,3 +3634,43 @@ def test_get_monthly_notification_data_by_service(sample_service, admin_request) 0, ], ] + + +def test_get_service_notification_statistics_by_day( + admin_request, mocker, sample_service +): + mock_data = [ + { + "notification_type": "email", + "status": "sent", + "day": "2024-11-01", + "count": 10, + }, + { + "notification_type": "sms", + "status": "failed", + "day": "2024-11-02", + "count": 5, + }, + { + "notification_type": "sms", + "status": "delivered", + "day": "2024-11-03", + "count": 11, + }, + ] + + mock_get_service_statistics_for_specific_days = mocker.patch( + "app.service.rest.get_service_statistics_for_specific_days", + return_value=mock_data, + ) + + response = admin_request.get( + "service.get_service_notification_statistics_by_day", + service_id=sample_service.id, + start="2024-11-03", + days="1", + )["data"] + + mock_get_service_statistics_for_specific_days.assert_called_once() + assert response == mock_data From 1af2a42742e8b324659a4ec650789df70e8cf9e6 Mon Sep 17 00:00:00 2001 From: alexjanousekGSA Date: Thu, 7 Nov 2024 12:24:34 -0500 Subject: [PATCH 36/45] Added section about feature flagging --- docs/all.md | 471 +++++++++++++++++++++++++--------------------------- 1 file changed, 229 insertions(+), 242 deletions(-) diff --git a/docs/all.md b/docs/all.md index 530c7ca43..ccde4ede9 100644 --- a/docs/all.md +++ b/docs/all.md @@ -41,7 +41,7 @@
repo](https://github.com/GSA/us-notify-compliance/blob/main/diagrams/rendered/apps/application.boundary.png). @@ -84,9 +83,9 @@ Application, infrastructure, and compliance work is spread across several reposi ### Application -* [notifications-api](https://github.com/GSA/notifications-api) for the API app -* [notifications-admin](https://github.com/GSA/notifications-admin) for the Admin UI app -* [notifications-utils](https://github.com/GSA/notifications-utils) for common library functions +- [notifications-api](https://github.com/GSA/notifications-api) for the API app +- [notifications-admin](https://github.com/GSA/notifications-admin) for the Admin UI app +- [notifications-utils](https://github.com/GSA/notifications-utils) for common library functions ### Infrastructure @@ -94,17 +93,17 @@ In addition to terraform directories in the api and admin apps above: #### We maintain: -* [usnotify-ssb](https://github.com/GSA/usnotify-ssb) A supplemental service broker that provisions SES and SNS for us -* [ttsnotify-brokerpak-sms](https://github.com/GSA/ttsnotify-brokerpak-sms) The brokerpak defining SNS (SMS sending) +- [usnotify-ssb](https://github.com/GSA/usnotify-ssb) A supplemental service broker that provisions SES and SNS for us +- [ttsnotify-brokerpak-sms](https://github.com/GSA/ttsnotify-brokerpak-sms) The brokerpak defining SNS (SMS sending) #### We use: -* [datagov-brokerpak-smtp](https://github.com/GSA-TTS/datagov-brokerpak-smtp) The brokerpak defining SES -* [cg-egress-proxy](https://github.com/GSA-TTS/cg-egress-proxy/) The caddy proxy that allows external API calls +- [datagov-brokerpak-smtp](https://github.com/GSA-TTS/datagov-brokerpak-smtp) The brokerpak defining SES +- [cg-egress-proxy](https://github.com/GSA-TTS/cg-egress-proxy/) The caddy proxy that allows external API calls ### Compliance -* [us-notify-compliance](https://github.com/GSA/us-notify-compliance) for OSCAL control documentation and diagrams +- 
[us-notify-compliance](https://github.com/GSA/us-notify-compliance) for OSCAL control documentation and diagrams ## Terraform @@ -116,9 +115,9 @@ Our Terraform configurations manage components via cloud.gov. This means that th There are several remote services required for local development: -* S3 -* SES -* SNS +- S3 +- SES +- SNS Credentials for these services are created by running: @@ -188,24 +187,24 @@ These steps are required for new cloud.gov environments. Local development borro Steps for deploying production from scratch. These can be updated for a new cloud.gov environment by subbing out `prod` or `production` for your desired environment within the steps. 1. Deploy API app - 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. - 1. Ensure that the `domain` module is commented out in `terraform/production/main.tf` - 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` - 1. Create any necessary DNS records (check `notify-api-ses-production` service credentials for instructions) within https://github.com/18f/dns - 1. Follow the `Steps to prepare SES` below - 1. (Optional) if using a public API route, uncomment the `domain` module and re-trigger a deploy + 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. + 1. Ensure that the `domain` module is commented out in `terraform/production/main.tf` + 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` + 1. Create any necessary DNS records (check `notify-api-ses-production` service credentials for instructions) within https://github.com/18f/dns + 1. Follow the `Steps to prepare SES` below + 1. (Optional) if using a public API route, uncomment the `domain` module and re-trigger a deploy 1. Deploy Admin app - 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. - 1. 
Ensure that the `api_network_route` and `domain` modules are commented out in `terraform/production/main.tf` - 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` - 1. Create DNS records for `domain` module within https://github.com/18f/dns - 1. Uncomment the `api_network_route` and `domain` modules and re-trigger a deploy + 1. Update `terraform-production.yml` and `deploy-prod.yml` to point to the correct space and git branch. + 1. Ensure that the `api_network_route` and `domain` modules are commented out in `terraform/production/main.tf` + 1. Run CI/CD pipeline on the `production` branch by opening a PR from `main` to `production` + 1. Create DNS records for `domain` module within https://github.com/18f/dns + 1. Uncomment the `api_network_route` and `domain` modules and re-trigger a deploy ### Steps to prepare SES 1. After the first deploy of the application with the SSB-brokered SES service completes: - 1. Log into the SES console and navigate to the SNS subscription page. - 1. Select "Request confirmation" for any subscriptions still in "Pending Confirmation" state + 1. Log into the SES console and navigate to the SNS subscription page. + 1. Select "Request confirmation" for any subscriptions still in "Pending Confirmation" state 1. Find and replace instances in the repo of "testsender", "testreceiver" and "dispostable.com", with your origin and destination email addresses, which you verified in step 1 above. TODO: create env vars for these origin and destination email addresses for the root service, and create new migrations to update postgres seed fixtures @@ -217,8 +216,8 @@ TODO: create env vars for these origin and destination email addresses for the r This should be complete for all regions Notify.gov has been deployed to or is currently planned to be deployed to. 1. Visit the SNS console for the region you will be sending from. Notes: - 1. 
SNS settings are per-region, so each environment must have its own region - 1. Pinpoint and SNS have confusing regional availability, so ensure both are available before submitting any requests. + 1. SNS settings are per-region, so each environment must have its own region + 1. Pinpoint and SNS have confusing regional availability, so ensure both are available before submitting any requests. 1. Choose `Text messaging (SMS)` from the sidebar 1. Click the `Exit SMS Sandbox` button and submit the support request. This request should take at most a day to complete. Be sure to request a higher sending limit at the same time. @@ -243,7 +242,7 @@ If you're using the `cf` CLI, you can run `cf logs notify-api-ENV` and/or `cf lo For general log searching, [the cloud.gov Kibana instance](https://logs.fr.cloud.gov/) is powerful, though quite complex to get started. For shortcuts to errors, some team members have New Relic access. -The links below will open a filtered view with logs from both applications, which can then be filtered further. However, for the links to work, you need to paste them into the URL bar while *already* logged into and viewing the Kibana page. If not, you'll just be redirected to the generic dashboard. +The links below will open a filtered view with logs from both applications, which can then be filtered further. However, for the links to work, you need to paste them into the URL bar while _already_ logged into and viewing the Kibana page. If not, you'll just be redirected to the generic dashboard. 
Production: https://logs.fr.cloud.gov/app/discover#/view/218a6790-596d-11ee-a43a-090d426b9a38 Demo: https://logs.fr.cloud.gov/app/discover#/view/891392a0-596e-11ee-921a-1b6b2f4d89ed @@ -274,11 +273,13 @@ make test ``` This will run: + - flake8 for code styling - isort for import styling - pytest for the test suite On GitHub, in addition to these tests, we run: + - bandit for code security - pip-audit for dependency vulnerabilities - OWASP for dynamic scanning @@ -293,7 +294,6 @@ In addition to commit-triggered scans, the `daily_checks.yml` workflow runs the Within GitHub Actions, several scans take place every day to ensure security and compliance. - #### [daily-checks.yml](../.github/workflows/daily_checks.yml) `daily-checks.yml` runs `pip-audit`, `bandit`, and `owasp` scans to ensure that any newly found vulnerabilities do not impact notify. Failures should be addressed quickly as they will also block the next attempted deploy. @@ -308,7 +308,7 @@ If you're checking out the system locally, you may want to create a user quickly `poetry run flask command create-test-user` -This will run an interactive prompt to create a user, and then mark that user as active. *Use a real mobile number* if you want to log in, as the SMS auth code will be sent here. +This will run an interactive prompt to create a user, and then mark that user as active. _Use a real mobile number_ if you want to log in, as the SMS auth code will be sent here. ## To run a local OWASP scan @@ -329,7 +329,7 @@ docker run -v $(pwd):/zap/wrk/:rw -t owasp/zap2docker-weekly zap-api-scan.py -t In order to run end-to-end (E2E) tests, which are managed and handled in the admin project, a bit of extra configuration needs to be accounted for here on -the API side as well. These instructions are in the README as they are +the API side as well. These instructions are in the README as they are necessary for project setup, and they're copied here for reference. 
In the `.env` file, you should see this section: @@ -352,10 +352,18 @@ variable to something else, preferably a lengthy passphrase.** With those two environment variable set, the database migrations will run properly and an E2E test user will be ready to go for use in the admin project. -_Note: Whatever you set these two environment variables to, you'll need to -match their values on the admin side. Please see the admin README and +_Note: Whatever you set these two environment variables to, you'll need to +match their values on the admin side. Please see the admin README and documentation for more details._ +# Feature Flagging + +Feature flagging is now implemented in the Admin application to allow conditional enabling of features. The current setup uses environment variables, which can be configured via the command line with Cloud Foundry (CF). These settings should be defined in each relevant .yml file and committed to source control. + +To adjust a feature flag, update the corresponding environment variable and redeploy as needed. This setup provides flexibility for enabling or disabling features without modifying the core application code. + +Specifics on the commands can be found in the [Admin Feature Flagging readme](https://github.com/GSA/notifications-admin/blob/main/docs/feature-flagging.md). + # Deploying The API has 3 deployment environments, all of which deploy to cloud.gov: @@ -395,7 +403,7 @@ and deploying an updated version of the application throught he normal deploy pr ## Managing environment variables -For an environment variable to make its way into the cloud.gov environment, it *must* end up in the `manifest.yml` file. Based on the deployment approach described above, there are 2 ways for this to happen. +For an environment variable to make its way into the cloud.gov environment, it _must_ end up in the `manifest.yml` file. Based on the deployment approach described above, there are 2 ways for this to happen. 
### Secret environment variables @@ -431,9 +439,9 @@ Rules for use: 1. Ensure that no other developer is using the environment, as there is nothing stopping changes from overwriting each other. 1. Clean up when you are done: - - `terraform destroy` from within the `terraform/sandbox` directory will take care of the provisioned services - - Delete the apps and routes shown in `cf apps` by running `cf delete APP_NAME -r` - - Delete the space deployer you created by following the instructions within `terraform/sandbox/secrets.auto.tfvars` + - `terraform destroy` from within the `terraform/sandbox` directory will take care of the provisioned services + - Delete the apps and routes shown in `cf apps` by running `cf delete APP_NAME -r` + - Delete the space deployer you created by following the instructions within `terraform/sandbox/secrets.auto.tfvars` ### Deploying to the sandbox @@ -442,30 +450,35 @@ If this is the first time you have used Terraform in this repository, you will f :anchor: The Admin app depends upon the API app, so set up the API first. 1. Set up services: - ```bash - $ cd terraform/sandbox - $ ../create_service_account.sh -s notify-sandbox -u -terraform -m > secrets.auto.tfvars - $ terraform init - $ terraform plan - $ terraform apply - ``` - Check [Terraform troubleshooting](https://github.com/GSA/notifications-api/tree/main/terraform#troubleshooting) if you encounter problems. + ```bash + $ cd terraform/sandbox + $ ../create_service_account.sh -s notify-sandbox -u -terraform -m > secrets.auto.tfvars + $ terraform init + $ terraform plan + $ terraform apply + ``` + Check [Terraform troubleshooting](https://github.com/GSA/notifications-api/tree/main/terraform#troubleshooting) if you encounter problems. 1. Change back to the project root directory: `cd ../..` 1. Start a poetry shell as a shortcut to load `.env` file variables by running `poetry shell`. (You'll have to restart this any time you change the file.) 1. 
Output requirements.txt file: `poetry export --without-hashes --format=requirements.txt > requirements.txt` 1. Ensure you are using the correct CloudFoundry target - ```bash - cf target -o gsa-tts-benefits-studio -s notify-sandbox - ``` + +```bash +cf target -o gsa-tts-benefits-studio -s notify-sandbox +``` + 1. Deploy the application: - ```bash - cf push --vars-file deploy-config/sandbox.yml --var NEW_RELIC_LICENSE_KEY=$NEW_RELIC_LICENSE_KEY - ``` - The real `push` command has more var arguments than the single one above. Get their values from a Notify team member. + +```bash +cf push --vars-file deploy-config/sandbox.yml --var NEW_RELIC_LICENSE_KEY=$NEW_RELIC_LICENSE_KEY +``` + +The real `push` command has more var arguments than the single one above. Get their values from a Notify team member. 1. Visit the URL(s) of the app you just deployed - * Admin https://notify-sandbox.app.cloud.gov/ - * API https://notify-api-sandbox.app.cloud.gov/ + +- Admin https://notify-sandbox.app.cloud.gov/ +- API https://notify-api-sandbox.app.cloud.gov/ # Database management @@ -527,7 +540,6 @@ Running on cloud.gov: cf run-task notify-api --command "flask command purge_functional_test_data -u " ``` - # One-off tasks For these, we're using Flask commands, which live in [`/app/commands.py`](../app/commands.py). @@ -545,7 +557,7 @@ To run a command on cloud.gov, use this format: `cf run-task CLOUD-GOV-APP --command "YOUR COMMAND HERE" --name YOUR-COMMAND-NAME` -**NOTE:** Do not include `poetry run` in the command you provide for `cf run-task`! cloud.gov is already aware +**NOTE:** Do not include `poetry run` in the command you provide for `cf run-task`! cloud.gov is already aware of the Python virtual environment and Python dependencies; it's all handled through the Python brokerpak we use to deploy the application. @@ -573,16 +585,17 @@ cf run-task --command "flask command upda All commands use the `-g` or `--generate` to determine how many instances to load to the db.
The `-g` or `--generate` option is required and will always default to 1. An example: `flask command add-test-users-to-db -g 6` will generate 6 random users and insert them into the db. ## Test commands list + - `add-test-organizations-to-db` - `add-test-services-to-db` - `add-test-jobs-to-db` - `add-test-notifications-to-db` - `add-test-users-to-db` (extra options include `-s` or `--state` and `-d` or `--admin`) - # How messages are queued and sent Services used during message-send flow: + 1. AWS S3 2. AWS SNS 3. AWS Cloudwatch @@ -614,9 +627,8 @@ Public APIs are intended for use by services and are all located under `app/v2/` New and existing APIs should be documented within [openapi.yml](./openapi.yml). Tools to help with editing this file: -* [OpenAPI Editor for VSCode](https://marketplace.visualstudio.com/items?itemName=42Crunch.vscode-openapi) -* [OpenAPI specification](https://spec.openapis.org/oas/v3.0.2) - +- [OpenAPI Editor for VSCode](https://marketplace.visualstudio.com/items?itemName=42Crunch.vscode-openapi) +- [OpenAPI specification](https://spec.openapis.org/oas/v3.0.2) ## New APIs @@ -673,8 +685,7 @@ An API key can be created at https://HOSTNAME/services/YOUR_SERVICE_ID/api/keys. ## Postman Documentation -Internal-only [documentation for exploring the API using Postman](https://docs.google.com/document/d/1S5c-LxuQLhAtZQKKsECmsllVGmBe34Z195sbRVEzUgw/edit#heading=h.134fqdup8d3m) - +Internal-only [documentation for exploring the API using Postman](https://docs.google.com/document/d/1S5c-LxuQLhAtZQKKsECmsllVGmBe34Z195sbRVEzUgw/edit#heading=h.134fqdup8d3m) ## Using OpenAPI documentation @@ -710,13 +721,12 @@ Because jwt tokens expire so quickly, the development server can be set to allow env ALLOW_EXPIRED_API_TOKEN=1 make run-flask ``` - - # Queues and tasks The API puts tasks into Celery queues for dispatch.
There are a bunch of queues: + - priority tasks - database tasks - send sms tasks @@ -735,6 +745,7 @@ There are a bunch of queues: - save api sms tasks And these tasks: + - check for missing rows in completed jobs - check for services with high failure rates or sending to tv numbers - check if letters still in created @@ -803,12 +814,9 @@ After scheduling some tasks, run celery beat to get them moving: make run-celery-beat ``` +# Notify.gov -Notify.gov -========= - -System Description ------------------- +## System Description Notify.gov is a service being developed by the TTS Public Benefits Studio to increase the availability of SMS and email notifications to Federal, State, and Local Benefits agencies. @@ -820,77 +828,74 @@ or services. The templates are sent by the agency using one of two methods: -* using the Notify.gov API to send a message to a given recipient with given personalization values -* using the Notify.gov website to upload a CSV file of recipients and their personalization values, one row per message +- using the Notify.gov API to send a message to a given recipient with given personalization values +- using the Notify.gov website to upload a CSV file of recipients and their personalization values, one row per message ### Environment Notify.gov is comprised of two applications both running on cloud.gov: -* Admin, a Flask website running on the python_buildpack which hosts agency user-facing UI -* API, a Flask application running on the python_buildpack hosting the Notify.gov API +- Admin, a Flask website running on the python_buildpack which hosts agency user-facing UI +- API, a Flask application running on the python_buildpack hosting the Notify.gov API Notify.gov utilizes several cloud.gov-provided services: -* S3 buckets for temporary file storage -* Elasticache (redis) for cacheing data and enqueueing background tasks -* RDS (PostgreSQL) for system data storage +- S3 buckets for temporary file storage +- Elasticache (redis) for cacheing data 
and enqueueing background tasks +- RDS (PostgreSQL) for system data storage Notify.gov also provisions and uses two AWS services via a [supplemental service broker](https://github.com/GSA/usnotify-ssb): -* [SNS](https://aws.amazon.com/sns/) for sending SMS messages -* [SES](https://aws.amazon.com/ses/) for sending email messages +- [SNS](https://aws.amazon.com/sns/) for sending SMS messages +- [SES](https://aws.amazon.com/ses/) for sending email messages For further details of the system and how it connects to supporting services, see the [application boundary diagram](https://github.com/GSA/us-notify-compliance/blob/main/diagrams/rendered/apps/application.boundary.png) - -Pull Requests -============= +# Pull Requests Changes are made to our applications via pull requests, which show a diff (the before and after state of all proposed changes in the code) of the work -done for that particular branch. We use pull requests as the basis for working +on Notify.gov and modifying the application over time for improvements, bug fixes, new features, and more.
There are several things that make for a good and complete pull request: -* An appropriate and descriptive title -* A detailed description of what's being changed, including any outstanding work +- An appropriate and descriptive title +- A detailed description of what's being changed, including any outstanding work (TODOs) -* A list of security considerations, which contains information about anything +- A list of security considerations, which contains information about anything we need to be mindful of from a security compliance perspective -* The proper labels, assignee, code reviewer, and other project metadata set - +- The proper labels, assignee, code reviewer, and other project metadata set ### Getting Started When you first open a pull request, start off by making sure the metadata for it is in place: -* Provide an appropriate and descriptive title for the pull request -* Link the pull request to its corresponding issue (must be done after creating +- Provide an appropriate and descriptive title for the pull request +- Link the pull request to its corresponding issue (must be done after creating the pull request itself) -* Assign yourself as the author -* Attach the appropriate labels to it -* Set it to be on the Notify.gov project board -* Select one or more reviewers from the team or mark the pull request as a draft +- Assign yourself as the author +- Attach the appropriate labels to it +- Set it to be on the Notify.gov project board +- Select one or more reviewers from the team or mark the pull request as a draft depending on its current state - * If the pull request is a draft, please be sure to add reviewers once it is - ready for review and mark it ready for review + - If the pull request is a draft, please be sure to add reviewers once it is + ready for review and mark it ready for review ### Description Please enter a clear description about your proposed changes and what the -expected outcome(s) is/are from there. 
If there are complex implementation +expected outcome(s) is/are from there. If there are complex implementation details within the changes, this is a great place to explain those details using plain language. This should include: -* Links to issues that this PR addresses (especially if more than one) -* Screenshots or screen captures of any visible changes, especially for UI work -* Dependency changes +- Links to issues that this PR addresses (especially if more than one) +- Screenshots or screen captures of any visible changes, especially for UI work +- Dependency changes If there are any caveats, known issues, follow-up items, etc., make a quick note of them here as well, though more details are probably warranted in the issue @@ -900,46 +905,44 @@ itself in this case. If you're opening a draft PR, it might be helpful to list any outstanding work, especially if you're asking folks to take a look before it's ready for full -review. In this case, create a small checklist with the outstanding items: +review. In this case, create a small checklist with the outstanding items: -* [ ] TODO item 1 -* [ ] TODO item 2 -* [ ] TODO item ... +- [ ] TODO item 1 +- [ ] TODO item 2 +- [ ] TODO item ... ### Security Considerations Please think about the security compliance aspect of your changes and what the potential impacts might be. -**NOTE: Please be mindful of sharing sensitive information here! If you're not sure of what to write, please ask the team first before writing anything here.** +**NOTE: Please be mindful of sharing sensitive information here! 
If you're not sure of what to write, please ask the team first before writing anything here.** Relevant details could include (and are not limited to) the following: -* Handling secrets/credential management (or specifically calling out that there +- Handling secrets/credential management (or specifically calling out that there is nothing to handle) -* Any adjustments to the flow of data in and out the system, or even within it -* Connecting or disconnecting any external services to the application -* Handling of any sensitive information, such as PII -* Handling of information within log statements or other application monitoring +- Any adjustments to the flow of data in and out the system, or even within it +- Connecting or disconnecting any external services to the application +- Handling of any sensitive information, such as PII +- Handling of information within log statements or other application monitoring services/hooks -* The inclusion of a new external dependency or the removal of an existing one -* ... (anything else relevant from a security compliance perspective) +- The inclusion of a new external dependency or the removal of an existing one +- ... (anything else relevant from a security compliance perspective) There are some cases where there are no security considerations to be had, e.g., -updating our documentation with publicly available information. In those cases +updating our documentation with publicly available information. In those cases it is fine to simply put something like this: -* None; this is a documentation update with publicly available information. +- None; this is a documentation update with publicly available information. This way it shows that we still gave this section consideration and that nothing happens to apply in this scenario. - -Code Reviews -============ +# Code Reviews When conducting a code review there are several things to keep in mind to ensure -a quality and valuable review. 
Remember, we're trying to improve Notify.gov as +a quality and valuable review. Remember, we're trying to improve Notify.gov as best we can; it does us no good if we do not double check that our work meets our standards, especially before going out the door! @@ -957,36 +960,36 @@ the reviewer and the author. ### For the reviewer When performing a code review, please be curious and critical while also being -respectful and appreciative of the work submitted. Code reviews are a chance +respectful and appreciative of the work submitted. Code reviews are a chance to check that things meet our standards and provide learning opportunities. They are not places for belittling or disparaging someone's work or approach to a task, and absolutely not the person(s) themselves. That said, any responses to the code review should also be respectful and -considerate. Remember, this is a chance to not only improve our work and the +considerate. Remember, this is a chance to not only improve our work and the state of Notify.gov, it's also a chance to learn something new! **Note: If a response is condescending, derogatory, disrespectful, etc., please do not hesitate to either speak with the author(s) directly about this or reach -out to a team lead/supervisor for additional help to rectify the issue. Such +out to a team lead/supervisor for additional help to rectify the issue. Such behavior and lack of professionalism is not acceptable or tolerated.** When performing a code review, it is helpful to keep the following guidelines in mind: -* Be on the lookout for any sensitive information and/or leaked credentials, +- Be on the lookout for any sensitive information and/or leaked credentials, secrets, PII, etc. -* Ask and call out things that aren't clear to you; it never hurts to double +- Ask and call out things that aren't clear to you; it never hurts to double check your understanding of something! 
-* Check that things are named descriptively and appropriately and call out +- Check that things are named descriptively and appropriately and call out anything that is not. -* Check that comments are present for complex areas when needed. -* Make sure the pull request itself is properly prepared - it has a clear +- Check that comments are present for complex areas when needed. +- Make sure the pull request itself is properly prepared - it has a clear description, calls out security concerns, and has the necessary labels, flags, issue link, etc., set on it. -* Do not be shy about using the suggested changes feature in GitHub pull request +- Do not be shy about using the suggested changes feature in GitHub pull request comments; this can help save a lot of time! -* Do not be shy about marking a review with the `Request Changes` status - yes, +- Do not be shy about marking a review with the `Request Changes` status - yes, it looks big and red when it shows up, but this is completely fine and not to be taken as a personal mark against the author(s) of the pull request! @@ -999,40 +1002,38 @@ This can save folks a lot of misunderstanding and back-and-forth! When receiving a code review, please remember that someone took the time to look over all of your work with a critical eye to make sure our standards are being -met and that we're producing the best quality work possible. It's completely +met and that we're producing the best quality work possible. It's completely fine if there are specific changes requested and/or other parts are sent back for additional work! That said, the review should also be respectful, helpful, and a learning -opportunity where possible. Remember, this is a chance to not only improve your +opportunity where possible. Remember, this is a chance to not only improve your work and the state of Notify.gov, it's also a chance to learn something new! 
**Note: If a review is condescending, derogatory, disrespectful, etc., please do not hesitate to either speak with the reviewer(s) directly about this or reach -out to a team lead/supervisor for additional help to rectify the issue. Such +out to a team lead/supervisor for additional help to rectify the issue. Such behavior and lack of professionalism is not acceptable or tolerated.** When going over a review, it may be helpful to keep these perspectives in mind: -* Approach the review with an open mind, curiosity, and appreciation. -* If anything the reviewer(s) mentions is unclear to you, please ask for +- Approach the review with an open mind, curiosity, and appreciation. +- If anything the reviewer(s) mentions is unclear to you, please ask for clarification and engage them in further dialogue! -* If you disagree with a suggestion or request, please say so and engage in an +- If you disagree with a suggestion or request, please say so and engage in an open and respectful dialogue to come to a mutual understanding of what the appropriate next step(s) should be - accept the change, reject the change, take a different path entirely, etc. -* If there are no issues with any suggested edits or requested changes, make +- If there are no issues with any suggested edits or requested changes, make the necessary adjustments and let the reviewer(s) know when the work is ready for review again. Additionally, if you find yourself responding to a lot of things and questioning the feedback received throughout much of the code review, it will likely be helpful to schedule time to speak with the reviewer(s) directly and talk through -everything. This can save folks a lot of misunderstanding and back-and-forth! +everything. This can save folks a lot of misunderstanding and back-and-forth! - -Run Book -======== +# Run Book Policies and Procedures needed before and during Notify.gov Operations. Many of these policies are taken from the Notify.gov System Security & Privacy Plan (SSPP).
@@ -1060,9 +1061,8 @@ Operational alerts are posted to the [#pb-notify-alerts](https://gsa-tts.slack.c In addition to the application logs, there are several tables in the application that store useful information for audit logging purposes: -* `events` -* the various `*_history` tables - +- `events` +- the various `*_history` tables ## Restaging Apps @@ -1098,7 +1098,6 @@ When `ssb-devel-sms` and/or `ssb-devel-smtp` need to be restaged: 1. Select the `development` environment from the dropdown 1. Click `Run workflow` within the popup - ## Deploying to Production Deploying to production involves 3 steps that must be done in order, and can be done for just the API, just the Admin, or both at the same time: @@ -1111,25 +1110,25 @@ Additionally, you may have to monitor the GitHub Actions as they take place to t ### Create a new pull request -This is done entirely in GitHub. First, go to the pull requests section of the API and/or Admin repository, then click on the `New pull request` button. +This is done entirely in GitHub. First, go to the pull requests section of the API and/or Admin repository, then click on the `New pull request` button. -In the screen that appears, change the `base: main` target branch on the left side of the arrow to `base: production` instead. You want to merge all of the latest changes in `main` to the `production` branch. After you've made the switch, click on the `Create pull request` button. +In the screen that appears, change the `base: main` target branch on the left side of the arrow to `base: production` instead. You want to merge all of the latest changes in `main` to the `production` branch. After you've made the switch, click on the `Create pull request` button. 
When the pull request details page appears, you'll need to set a few things: Title: ` Production Deploy`, e.g., `9/9/2024 Production Deploy` -Description: feel free to copy from a previous production deploy PR; note that you'll have to change the links to the release notes if applicable! -Labels: `Engineering` -Author: set to yourself -Reviewers: assign folks or the @notify-contributors team +Description: feel free to copy from a previous production deploy PR; note that you'll have to change the links to the release notes if applicable! +Labels: `Engineering` +Author: set to yourself +Reviewers: assign folks or the @notify-contributors team Please link it to the project board as well, then click on the `Create pull request` button to finalize it all. ### Create a new release tag -On the main page of the repository, click on the small heading that says `Releases` on the right to get to the release listing page. Once there, click on the `Draft a new release` button. +On the main page of the repository, click on the small heading that says `Releases` on the right to get to the release listing page. Once there, click on the `Draft a new release` button. -You'll first have to choose a tag or create a new one: use the current date as the tag name, e.g., `9/9/2024`. Keep the target set to `main` and then click on the `Generate release notes button`. +You'll first have to choose a tag or create a new one: use the current date as the tag name, e.g., `9/9/2024`. Keep the target set to `main` and then click on the `Generate release notes button`. Add a title in the format of `` Production Deploy, e.g., `9/9/2024 Production Deploy`. @@ -1139,17 +1138,16 @@ Once everything is complete, click on the `Publish release` button and then link ### Review and approve the pull request(s) -When everything is good to go, two people will need to approve the pull request for merging into the `production` branch. Once they do, then merge the pull request.
+When everything is good to go, two people will need to approve the pull request for merging into the `production` branch. Once they do, then merge the pull request. -At this point everything is mostly automatic. The deploy will update both the `demo` and `production` environments. Once the deploys are done and successful, go back into the pre-release release notes and switch the checkboxes to turn it into the latest release and save the change. +At this point everything is mostly automatic. The deploy will update both the `demo` and `production` environments. Once the deploys are done and successful, go back into the pre-release release notes and switch the checkboxes to turn it into the latest release and save the change. ### Troubleshooting production deploys -Sometimes a deploy will fail and you will have to look at the GitHub Action deployment logs to see what the cause is. In many cases it will be an out of memory error because of the two environments going out at the same time. Whenever the successful deploy is finished, re-run the failed jobs in the other deployment action again. +Sometimes a deploy will fail and you will have to look at the GitHub Action deployment logs to see what the cause is. In many cases it will be an out of memory error because of the two environments going out at the same time. Whenever the successful deploy is finished, re-run the failed jobs in the other deployment action again. Once the deploys are finished it's also a good idea to just poke around the site to make sure things are working fine and as expected! - ## Smoke-testing the App To ensure that notifications are passing through the application properly, the following steps can be taken to ensure all parts are operating correctly: @@ -1164,21 +1162,20 @@ Assuming that you have followed all steps to set up localstack successfully (see 1. Create an sms template that requires no inputs from the user (i.e. the csv file will only have phone numbers) 2. 
Uncomment the test 'test_generate_csv_for_bulk_testing' in app/test_utils.py -3. Run `make test` on this project. This will generate the csv file for the bulk test. +3. Run `make test` on this project. This will generate the csv file for the bulk test. 4. If you are not a platform admin for your service when you run locally, do the following: - - >psql -d notification_api + - > psql -d notification_api - update users set platform_admin='t'; - \q - sign out - sign in. - Go to settings and set the organization for your service to 'Broadcast services' (scroll down to platform admin) - Go to settings and set your service to 'live' (scroll down to platform admin) -5. Run your app 'locally'. I.e. run `make run-procfile` on this project and `make run-flask` on the admin project -6. Sign in. Verify you are running with localstack. I.e., you do NOT receive a text message on sign in. Instead, +5. Run your app 'locally'. I.e. run `make run-procfile` on this project and `make run-flask` on the admin project +6. Sign in. Verify you are running with localstack. I.e., you do NOT receive a text message on sign in. Instead, you see your authentication code in green in the api logs 7. Go to send messages and upload your csv file and send your 100000 messages - ## Configuration Management Also known as: **How to move code from my machine to production** @@ -1214,11 +1211,11 @@ Also known as: **How to move code from my machine to production** US_Notify Administrators are responsible for ensuring that remediations for vulnerabilities are implemented. 
Response times vary based on the level of vulnerability as follows: -* Critical (Very High) - 15 days -* High - 30 days -* Medium - 90 days -* Low - 180 days -* Informational - 365 days (depending on the analysis of the issue) +- Critical (Very High) - 15 days +- High - 30 days +- Medium - 90 days +- Low - 180 days +- Informational - 365 days (depending on the analysis of the issue) ## DNS Changes @@ -1244,13 +1241,12 @@ Notify.gov DNS records are maintained within [the 18f/dns repository](https://gi ## Rotating the DANGEROUS_SALT - - 1. Start API locally `make run-procfile` - 2. In a separate terminal tab, navigate to the API project and run `poetry run flask command generate-salt` - 3. A random secret will appear in the tab - 4. Go to github->settings->secrets and variables->actions in the admin project and find the DANGEROUS_SALT secret for the admin project for staging. Open it and paste the result of #3 into the secret and save. Repeat for the API project, for staging. - 5. Repeat #3 and #4 but do it for demo - 6. Repeat #3 and #4 but do it for production +1. Start API locally `make run-procfile` +2. In a separate terminal tab, navigate to the API project and run `poetry run flask command generate-salt` +3. A random secret will appear in the tab +4. Go to github->settings->secrets and variables->actions in the admin project and find the DANGEROUS_SALT secret for the admin project for staging. Open it and paste the result of #3 into the secret and save. Repeat for the API project, for staging. +5. Repeat #3 and #4 but do it for demo +6. Repeat #3 and #4 but do it for production The important thing is to use the same secret for Admin and API on each tier--i.e. you only generate three secrets. @@ -1278,40 +1274,40 @@ The important thing is to use the same secret for Admin and API on each tier--i. 
Important policies: -* Infrastructure Accounts and Application Platform Administrators must be approved by the System Owner (Amy) before creation, but people with `Administrator` role can actually do the creation and role assignments. -* At least one agency partner must act as the `User Manager` for their service, with permissions to manage their team according to their agency's policies and procedures. -* All users must utilize `.gov` email addresses. -* Users who leave the team or otherwise have role changes must have their accounts updated to reflect the new roles required (or disabled) within 14 days. -* SpaceDeployer credentials must be rotated within 14 days of anyone with SpaceDeveloper cloud.gov access leaving the team. -* A user report must be created annually (See AC-2(j)). `make cloudgov-user-report` can be used to create a full report of all cloud.gov users. +- Infrastructure Accounts and Application Platform Administrators must be approved by the System Owner (Amy) before creation, but people with `Administrator` role can actually do the creation and role assignments. +- At least one agency partner must act as the `User Manager` for their service, with permissions to manage their team according to their agency's policies and procedures. +- All users must utilize `.gov` email addresses. +- Users who leave the team or otherwise have role changes must have their accounts updated to reflect the new roles required (or disabled) within 14 days. +- SpaceDeployer credentials must be rotated within 14 days of anyone with SpaceDeveloper cloud.gov access leaving the team. +- A user report must be created annually (See AC-2(j)). `make cloudgov-user-report` can be used to create a full report of all cloud.gov users. 
### Types of Infrastructure Users -| Role Name | System | Permissions | Who | Responsibilities | -| --------- | ------ | ----------- | --- | ---------------- | -| Administrator | GitHub | Admin | PBS Fed | Approve & Merge PRs into main and production | -| Administrator | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed | Read audit logs, verify & fix any AWS service issues within Production AWS account | -| Administrator | Cloud.gov | `OrgManager` | PBS Fed | Manage cloud.gov roles and permissions. Access to production spaces | -| DevOps Engineer | Cloud.gov | `SpaceManager` | PBS Fed or Contractor | Access to non-production spaces | -| DevOps Engineer | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed or Contractor | Access to non-production AWS accounts to verify & fix any AWS issues in the lower environments | -| Engineer | GitHub | Write | PBS Fed or Contractor | Write code & issues, submit PRs | +| Role Name | System | Permissions | Who | Responsibilities | +| --------------- | --------- | ------------------------------------ | --------------------- | ---------------------------------------------------------------------------------------------- | +| Administrator | GitHub | Admin | PBS Fed | Approve & Merge PRs into main and production | +| Administrator | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed | Read audit logs, verify & fix any AWS service issues within Production AWS account | +| Administrator | Cloud.gov | `OrgManager` | PBS Fed | Manage cloud.gov roles and permissions. 
Access to production spaces | +| DevOps Engineer | Cloud.gov | `SpaceManager` | PBS Fed or Contractor | Access to non-production spaces | +| DevOps Engineer | AWS | `NotifyAdministrators` IAM UserGroup | PBS Fed or Contractor | Access to non-production AWS accounts to verify & fix any AWS issues in the lower environments | +| Engineer | GitHub | Write | PBS Fed or Contractor | Write code & issues, submit PRs | ### Types of Application Users -| Role Name | Permissions | Who | Responsibilities | -| --------- | ----------- | --- | ---------------- | -| Platform Administrator | `platform_admin` | PBS Fed | Administer system settings within Notify.gov across Services | -| User Manager | `MANAGE_USERS` | Agency Partner | Manage service team members | -| User | any except `MANAGE_USERS` | Agency Partner | Use Notify.gov | +| Role Name | Permissions | Who | Responsibilities | +| ---------------------- | ------------------------- | -------------- | ------------------------------------------------------------ | +| Platform Administrator | `platform_admin` | PBS Fed | Administer system settings within Notify.gov across Services | +| User Manager | `MANAGE_USERS` | Agency Partner | Manage service team members | +| User | any except `MANAGE_USERS` | Agency Partner | Use Notify.gov | ### Service Accounts -| Role Name | System | Permissions | Notes | -| --------- | ------ | ----------- | ----- | -| Cloud.gov Service Account | Cloud.gov | `OrgManager` and `SpaceDeveloper` | Creds stored in GitHub Environment secrets within api and admin app repos | -| SSB Deployment Account | AWS | `IAMFullAccess` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | -| SSB Cloud.gov Service Account | Cloud.gov | `SpaceDeveloper` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | -| SSB AWS Accounts | AWS | `sms_broker` or `smtp_broker` IAM role | Creds created and maintained by usnotify-ssb terraform | +| Role Name | System | Permissions | Notes | +| 
----------------------------- | --------- | -------------------------------------- | ------------------------------------------------------------------------- | +| Cloud.gov Service Account | Cloud.gov | `OrgManager` and `SpaceDeveloper` | Creds stored in GitHub Environment secrets within api and admin app repos | +| SSB Deployment Account | AWS | `IAMFullAccess` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | +| SSB Cloud.gov Service Account | Cloud.gov | `SpaceDeveloper` | Creds stored in GitHub Environment secrets within usnotify-ssb repo | +| SSB AWS Accounts | AWS | `sms_broker` or `smtp_broker` IAM role | Creds created and maintained by usnotify-ssb terraform | ## SMS Phone Number Management @@ -1319,48 +1315,44 @@ See [Infrastructure Overview](./infra-overview.md#request-new-phone-numbers) for Once you have a number, it must be set in the app in one of two ways: -* For the default phone number, to be used by Notify itself for OTP codes and the default from number for services, set the phone number as the `AWS_US_TOLL_FREE_NUMBER` ENV variable in the environment you are creating -* For service-specific phone numbers, set the phone number in the Service's `Text message senders` in the settings tab. +- For the default phone number, to be used by Notify itself for OTP codes and the default from number for services, set the phone number as the `AWS_US_TOLL_FREE_NUMBER` ENV variable in the environment you are creating +- For service-specific phone numbers, set the phone number in the Service's `Text message senders` in the settings tab. ### Current Production Phone Numbers -* +18447952263 - in use as default number. 
Notify's OTP messages and trial service messages are sent from this number (Also the number for the live service: Federal Test Service) -* +18447891134 - Montgomery County / Ride On -* +18888402596 - Norfolk / DHS -* +18555317292 - Washington State / DHS -* +18889046435 - State Department / Consular Affairs -* +18447342791 -* +18447525067 -* +18336917230 -* +18335951552 -* +18333792033 -* +18338010522 +- +18447952263 - in use as default number. Notify's OTP messages and trial service messages are sent from this number (Also the number for the live service: Federal Test Service) +- +18447891134 - Montgomery County / Ride On +- +18888402596 - Norfolk / DHS +- +18555317292 - Washington State / DHS +- +18889046435 - State Department / Consular Affairs +- +18447342791 +- +18447525067 +- +18336917230 +- +18335951552 +- +18333792033 +- +18338010522 For a full list of phone numbers in trial and production, team members can access a [tracking list here](https://docs.google.com/spreadsheets/d/1lq3Wi_up7EkcKvmwO3oTw30m7kVt1iXvdS3KAp0smh4/edit#gid=0). 
+# Data Storage Policies & Procedures -Data Storage Policies & Procedures -================================== - - -Potential PII Locations ------------------------ +## Potential PII Locations ### Tables #### users1 -* name -* email_address -* mobile_number +- name +- email_address +- mobile_number #### invited_users1 -* email_address +- email_address #### invited_organization_users1 -* email_address +- email_address #### jobs @@ -1368,23 +1360,23 @@ No db data is PII, but each job has a csv file in s3 containing phone numbers an #### notifications -* to -* normalized_to -* _personalization2 -* phone_prefix3 +- to +- normalized_to +- \_personalization2 +- phone_prefix3 #### notification_history -* phone_prefix3 +- phone_prefix3 #### inbound_sms -* content2 -* user_number +- content2 +- user_number #### events -* data (contains user IP addresses)1 +- data (contains user IP addresses)1 ### Notes @@ -1402,9 +1394,7 @@ Details on encryption schemes and algorithms can be found in [SC-28(1)](https:// Probably not PII, this is the country code of the phone. - -Data Retention Policy ---------------------- +## Data Retention Policy Seven (7) days by default. Each service can be set with a custom policy via `ServiceDataRetention` by a Platform Admin. The `ServiceDataRetention` setting applies per-service and per-message type and controls both entries in the `notifications` table as well as `csv` contact files uploaded to s3 @@ -1414,21 +1404,18 @@ Data cleanup is controlled by several tasks in the `nightly_tasks.py` file, kick ## Debug messages not being sent - ### Getting the file location and tracing what happens +Ask the user to provide the csv file name. Either the csv file they uploaded, or the one that is autogenerated when they do a one-off send and is visible in the UI -Ask the user to provide the csv file name. 
Either the csv file they uploaded, or the one that is autogenerated when they do a one-off send and is visible in the UI +Starting with the admin logs, search for this file name. When you find it, the log line should have the file name linked to the job_id and the csv file location. Save both of these. -Starting with the admin logs, search for this file name. When you find it, the log line should have the file name linked to the job_id and the csv file location. Save both of these. - -In the api logs, search by job_id. Either you will see evidence of the job failing and retrying over and over (in which case search for a stack trace using timestamp), or you will ultimately get to a log line that links the job_id to a message_id. In this case, now search by message_id. You should be able to find the actual result from AWS, either success or failure, with hopefully some helpful info. +In the api logs, search by job_id. Either you will see evidence of the job failing and retrying over and over (in which case search for a stack trace using timestamp), or you will ultimately get to a log line that links the job_id to a message_id. In this case, now search by message_id. You should be able to find the actual result from AWS, either success or failure, with hopefully some helpful info. ### Viewing the csv file If you need to view the questionable csv file on production, run the following command: - ``` cf run-task notify-api-production --command "flask command download-csv-file-by-name -f " ``` @@ -1443,10 +1430,10 @@ poetry run flask command download-csv-file-by-name 1. Either send a message and capture the csv file name, or get a csv file name from a user 2. Using the log tool at logs.fr.cloud.gov, use filters to limit what you're searching on (cf.app is 'notify-admin-production' for example) and then search with the csv file name in double quotes over the relevant time period (last 5 minutes if you just sent a message, or else whatever time the user sent at) -3. 
When you find the log line, you should also find the job_id and the s3 file location. Save these somewhere. -4. To get the csv file contents, you can run the command above. This command currently prints to the notify-api log, so after you run the command, -you need to search in notify-api-production for the last 5 minutes with the logs sorted by timestamp. The contents of the csv file unfortunately appear on separate lines so it's very important to sort by time. -5. If you want to see where the message actually failed, search with cf.app is notify-api-production using the job_id that you saved in step #3. If you get far enough, you might see one of the log lines has a message_id. If you see it, you can switch and search on that, which should tell you what happened in AWS (success or failure). +3. When you find the log line, you should also find the job_id and the s3 file location. Save these somewhere. +4. To get the csv file contents, you can run the command above. This command currently prints to the notify-api log, so after you run the command, + you need to search in notify-api-production for the last 5 minutes with the logs sorted by timestamp. The contents of the csv file unfortunately appear on separate lines so it's very important to sort by time. +5. If you want to see where the message actually failed, search with cf.app is notify-api-production using the job_id that you saved in step #3. If you get far enough, you might see one of the log lines has a message_id. If you see it, you can switch and search on that, which should tell you what happened in AWS (success or failure). 
## Deployment / app push problems From d89f1206d57a2d5fca693e347d105da0ebee54c4 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 09:38:21 -0800 Subject: [PATCH 37/45] put max connections back to 10 --- app/clients/__init__.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/app/clients/__init__.py b/app/clients/__init__.py index 88565bd22..3392928e4 100644 --- a/app/clients/__init__.py +++ b/app/clients/__init__.py @@ -14,11 +14,7 @@ }, use_fips_endpoint=True, # This is the default but just for doc sake - # there may come a time when increasing this helps - # with job cache management. - # max_pool_connections=10, - # Reducing to 7 connections due to BrokenPipeErrors - max_pool_connections=7, + max_pool_connections=10, ) From 17337dfc7271f94b05d11c1c7b25152c4ac8abd9 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 13:14:43 -0800 Subject: [PATCH 38/45] add debugging --- app/clients/sms/aws_sns.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 8b5d6c963..cc891c031 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -64,11 +64,15 @@ def send_sms(self, to, content, reference, sender=None, international=False): } if self._valid_sender_number(sender): + self.current_app.logger.info("aws_sns found a valid sender number!") attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", "StringValue": sender, } else: + self.current_app.logger.info( + "aws_sns did not find a valid sender number, defaulting to the toll free one" + ) attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", "StringValue": self.current_app.config["AWS_US_TOLL_FREE_NUMBER"], From 767f94a26c3f295307451fee0bff19d69d8ddf0c Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 13:18:05 -0800 Subject: [PATCH 39/45] add debugging --- app/clients/sms/aws_sns.py | 8 
+++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index cc891c031..ed08ef5df 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -64,7 +64,13 @@ def send_sms(self, to, content, reference, sender=None, international=False): } if self._valid_sender_number(sender): - self.current_app.logger.info("aws_sns found a valid sender number!") + self.current_app.logger.info( + "aws_sns found a valid sender number here it is wait for it!" + ) + # To defeat scrubbing, sender numbers are not PII. + for number in sender: + self.current_app.logger.info(number) + attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", "StringValue": sender, From 5cd0e1e523e7e9dbc6d0ada08b5ff10550edc60a Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 13:29:59 -0800 Subject: [PATCH 40/45] put number all on one line --- app/clients/sms/aws_sns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index ed08ef5df..ddc0629cb 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -68,8 +68,8 @@ def send_sms(self, to, content, reference, sender=None, international=False): "aws_sns found a valid sender number here it is wait for it!" ) # To defeat scrubbing, sender numbers are not PII. 
- for number in sender: - self.current_app.logger.info(number) + non_scrubbable = " ".join(sender) + self.current_app.logger.info(non_scrubbable) attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", From 2a8b1374b03caf9adeb12b3de84d0a5a2c6c0373 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 14:40:18 -0800 Subject: [PATCH 41/45] put number all on one line --- app/clients/sms/aws_sns.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index ddc0629cb..ddd406590 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -63,13 +63,15 @@ def send_sms(self, to, content, reference, sender=None, international=False): } } + non_scrubbable = " ".join(sender) + default_num = " ".join(self.current_app.config["AWS_US_TOLL_FREE_NUMBER"]) + self.current_app.logger.info( + f"notify-api-1385 sender {non_scrubbable} is a {type(sender)} default is a {type(default_num)}" + ) if self._valid_sender_number(sender): self.current_app.logger.info( - "aws_sns found a valid sender number here it is wait for it!" + f"notify-api-1385 use valid sender {non_scrubbable} instead of default {default_num}" ) - # To defeat scrubbing, sender numbers are not PII. 
- non_scrubbable = " ".join(sender) - self.current_app.logger.info(non_scrubbable) attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", @@ -77,8 +79,9 @@ def send_sms(self, to, content, reference, sender=None, international=False): } else: self.current_app.logger.info( - "aws_sns did not find a valid sender number, defaulting to the toll free one" + f"notify-api-1385 use default {default_num} instead of invalid sender {non_scrubbable}" ) + attributes["AWS.MM.SMS.OriginationNumber"] = { "DataType": "String", "StringValue": self.current_app.config["AWS_US_TOLL_FREE_NUMBER"], From 151ea31f6366b121021379d0a2d5e480acaa648b Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 15:09:18 -0800 Subject: [PATCH 42/45] put number all on one line --- app/clients/sms/aws_sns.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index ddd406590..79a1ce471 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -63,11 +63,18 @@ def send_sms(self, to, content, reference, sender=None, international=False): } } - non_scrubbable = " ".join(sender) - default_num = " ".join(self.current_app.config["AWS_US_TOLL_FREE_NUMBER"]) - self.current_app.logger.info( - f"notify-api-1385 sender {non_scrubbable} is a {type(sender)} default is a {type(default_num)}" - ) + if isinstance(sender, str): + non_scrubbable = " ".join(sender) + default_num = " ".join( + self.current_app.config["AWS_US_TOLL_FREE_NUMBER"] + ) + self.current_app.logger.info( + f"notify-api-1385 sender {non_scrubbable} is a {type(sender)} default is a {type(default_num)}" + ) + else: + self.current_app.logger.warning( + f"notify-api-1385 sender is type {type(sender)}!! 
{sender}" + ) if self._valid_sender_number(sender): self.current_app.logger.info( f"notify-api-1385 use valid sender {non_scrubbable} instead of default {default_num}" @@ -79,7 +86,7 @@ def send_sms(self, to, content, reference, sender=None, international=False): } else: self.current_app.logger.info( - f"notify-api-1385 use default {default_num} instead of invalid sender {non_scrubbable}" + f"notify-api-1385 use default {default_num} instead of invalid sender" ) attributes["AWS.MM.SMS.OriginationNumber"] = { From 312ae5e89ecf0f172d1a6d422c0c1314724dc497 Mon Sep 17 00:00:00 2001 From: Kenneth Kehl <@kkehl@flexion.us> Date: Tue, 12 Nov 2024 15:19:52 -0800 Subject: [PATCH 43/45] put number all on one line --- app/clients/sms/aws_sns.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/clients/sms/aws_sns.py b/app/clients/sms/aws_sns.py index 79a1ce471..d36af600c 100644 --- a/app/clients/sms/aws_sns.py +++ b/app/clients/sms/aws_sns.py @@ -63,11 +63,10 @@ def send_sms(self, to, content, reference, sender=None, international=False): } } + default_num = " ".join(self.current_app.config["AWS_US_TOLL_FREE_NUMBER"]) if isinstance(sender, str): non_scrubbable = " ".join(sender) - default_num = " ".join( - self.current_app.config["AWS_US_TOLL_FREE_NUMBER"] - ) + self.current_app.logger.info( f"notify-api-1385 sender {non_scrubbable} is a {type(sender)} default is a {type(default_num)}" ) From 469103d4f1de3c71659858289e9e21fac30fce5f Mon Sep 17 00:00:00 2001 From: Carlo Costino Date: Fri, 25 Oct 2024 15:31:02 -0400 Subject: [PATCH 44/45] Make sandbox deployments easier This changeset makes a few adjustments to our sandbox environment config to make the sandbox deployments of the API much easier. 
It does this with the following: * Adds a few environment variable values to the sandbox.yml file to cover the ones that were not there previously * Adds a new script that can be run in place of memorizing any commands * Adds documentation on how to configure and run the sandbox environment Signed-off-by: Carlo Costino --- deploy-config/sandbox.yml | 5 +++++ docs/all.md | 42 +++++++++++++++++++++++++++--------- scripts/deploy_to_sandbox.sh | 15 +++++++++++++ 3 files changed, 52 insertions(+), 10 deletions(-) create mode 100755 scripts/deploy_to_sandbox.sh diff --git a/deploy-config/sandbox.yml b/deploy-config/sandbox.yml index d94339837..afaf40c52 100644 --- a/deploy-config/sandbox.yml +++ b/deploy-config/sandbox.yml @@ -9,5 +9,10 @@ admin_base_url: https://notify-sandbox.app.cloud.gov redis_enabled: 1 default_toll_free_number: "+18885989205" ADMIN_CLIENT_SECRET: sandbox-notify-secret-key +API_HOST_NAME: https://notify-api-sandbox.app.cloud.gov DANGEROUS_SALT: sandbox-notify-salt +LOGIN_DOT_GOV_REGISTRATION_URL: https://idp.int.identitysandbox.gov/openid_connect/authorize?acr_values=http%3A%2F%2Fidmanagement.gov%2Fns%2Fassurance%2Fial%2F1&client_id=urn:gov:gsa:openidconnect.profiles:sp:sso:gsa:test_notify_gov&nonce=NONCE&prompt=select_account&redirect_uri=https://notify-sandbox.app.cloud.gov/set-up-your-profile&response_type=code&scope=openid+email&state=STATE +NEW_RELIC_LICENSE_KEY: "" +NOTIFY_E2E_TEST_EMAIL: fake.user@example.com +NOTIFY_E2E_TEST_PASSWORD: "don't write secrets to the sample file" SECRET_KEY: sandbox-notify-secret-key diff --git a/docs/all.md b/docs/all.md index ccde4ede9..a4097194b 100644 --- a/docs/all.md +++ b/docs/all.md @@ -443,22 +443,44 @@ Rules for use: - Delete the apps and routes shown in `cf apps` by running `cf delete APP_NAME -r` - Delete the space deployer you created by following the instructions within `terraform/sandbox/secrets.auto.tfvars` -### Deploying to the sandbox +### Setting up the sandbox infrastructure If this is the 
first time you have used Terraform in this repository, you will first have to hook your copy of Terraform up to our remote state. Follow [Retrieving existing bucket credentials](https://github.com/GSA/notifications-api/tree/main/terraform#retrieving-existing-bucket-credentials). :anchor: The Admin app depends upon the API app, so set up the API first. 1. Set up services: - ```bash - $ cd terraform/sandbox - $ ../create_service_account.sh -s notify-sandbox -u -terraform -m > secrets.auto.tfvars - $ terraform init - $ terraform plan - $ terraform apply - ``` - Check [Terraform troubleshooting](https://github.com/GSA/notifications-api/tree/main/terraform#troubleshooting) if you encounter problems. -1. Change back to the project root directory: `cd ../..` + ```bash + $ cd terraform/sandbox + $ ../create_service_account.sh -s notify-sandbox -u -terraform -m > secrets.auto.tfvars + $ terraform init + $ terraform plan + $ terraform apply + ``` + Check [Terraform troubleshooting](https://github.com/GSA/notifications-api/tree/main/terraform#troubleshooting) if you encounter problems. + +Note that you'll have to do this for both the API and the Admin. Once this is complete we shouldn't have to do it again (unless we're setting up a new sandbox environment). + +### Deploying to the sandbox + +To deploy either the API or the Admin apps to the sandbox, the process is largely the same, but the Admin requires a bit of additional work. + +#### Deploying the API to the sandbox + +1. Make sure you are in the API project's root directory. +1. Authenticate with cloud.gov in the command line: `cf login -a api.fr.cloud.gov --sso` +1. Run `./scripts/deploy_to_sandbox.sh` from the project root directory. + +At this point your target org and space will change with cloud.gov to be the `notify-sandbox` environment and the application will be pushed for deployment. 
+
+The script does a few things to make sure the deployment flows smoothly with minimal work on your part:
+
+* Sets the target org and space in cloud.gov for you.
+* Creates a `requirements.txt` file for the Python dependencies so that the deployment picks up on the dependencies properly.
+* Pushes the application with the correct environment variables set based on what is supplied by the `deploy-config/sandbox.yml` file.
+
+#### Deploying the Admin to the sandbox
+
 1. Start a poetry shell as a shortcut to load `.env` file variables by running `poetry shell`. (You'll have to restart this any time you change the file.)
 1. Output requirements.txt file: `poetry export --without-hashes --format=requirements.txt > requirements.txt`
 1. Ensure you are using the correct CloudFoundry target
diff --git a/scripts/deploy_to_sandbox.sh b/scripts/deploy_to_sandbox.sh
new file mode 100755
index 000000000..306fb0af9
--- /dev/null
+++ b/scripts/deploy_to_sandbox.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+# Create a requirements.txt file so dependencies are properly managed with the
+# deploy. This will overwrite any existing requirements.txt file to make sure
+# it is always up-to-date.
+poetry export --without-hashes --format=requirements.txt > requirements.txt
+
+# Target the notify-sandbox space and deploy to cloud.gov with a cf push.
+# All environment variables are accounted for in the deploy-config/sandbox.yml
+# file, no need to add any of your own or source a .env* file.
+ +# If ithis errors out because you need to be logged in, login first with this: +# cf login -a api.fr.cloud.gov --sso +cf target -o gsa-tts-benefits-studio -s notify-sandbox +cf push -f manifest.yml --vars-file deploy-config/staging.yml --strategy rolling From 4155b54467ee1769617bcf8458b69a6e680b4c99 Mon Sep 17 00:00:00 2001 From: Carlo Costino Date: Wed, 30 Oct 2024 15:01:48 -0400 Subject: [PATCH 45/45] Update .gitignore and fix a typo Signed-off-by: Carlo Costino --- .gitignore | 1 + scripts/deploy_to_sandbox.sh | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index f60b72b58..cf35582a6 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,7 @@ var/ .installed.cfg *.egg /cache +requirements.txt # PyInstaller # Usually these files are written by a python script from a template diff --git a/scripts/deploy_to_sandbox.sh b/scripts/deploy_to_sandbox.sh index 306fb0af9..683e875b1 100755 --- a/scripts/deploy_to_sandbox.sh +++ b/scripts/deploy_to_sandbox.sh @@ -9,7 +9,7 @@ poetry export --without-hashes --format=requirements.txt > requirements.txt # All environment variables are accounted for in the deploy-config/sandbox.yml # file, no need to add any of your own or source a .env* file. -# If ithis errors out because you need to be logged in, login first with this: +# If this errors out because you need to be logged in, login first with this: # cf login -a api.fr.cloud.gov --sso cf target -o gsa-tts-benefits-studio -s notify-sandbox -cf push -f manifest.yml --vars-file deploy-config/staging.yml --strategy rolling +cf push -f manifest.yml --vars-file deploy-config/sandbox.yml --strategy rolling