diff --git a/.ds.baseline b/.ds.baseline index cec28396cd..bb6b3ce39d 100644 --- a/.ds.baseline +++ b/.ds.baseline @@ -169,7 +169,7 @@ "filename": "app/config.py", "hashed_secret": "577a4c667e4af8682ca431857214b3a920883efc", "is_verified": false, - "line_number": 111, + "line_number": 117, "is_secret": false } ], @@ -423,7 +423,7 @@ "filename": "app/templates/new/components/head.html", "hashed_secret": "ee5048791fc7ff45a1545e24f85bec3317371327", "is_verified": false, - "line_number": 35, + "line_number": 34, "is_secret": false } ], @@ -586,20 +586,12 @@ } ], "tests/app/main/views/test_sign_in.py": [ - { - "type": "Private Key", - "filename": "tests/app/main/views/test_sign_in.py", - "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", - "is_verified": false, - "line_number": 31, - "is_secret": false - }, { "type": "Secret Keyword", "filename": "tests/app/main/views/test_sign_in.py", "hashed_secret": "8b8b69116ee882b5e987e330f55db81aba0636f9", "is_verified": false, - "line_number": 104, + "line_number": 90, "is_secret": false } ], @@ -675,7 +667,7 @@ "filename": "tests/conftest.py", "hashed_secret": "f8377c90fcfd699f0ddbdcb30c2c9183d2d933ea", "is_verified": false, - "line_number": 3289, + "line_number": 3266, "is_secret": false } ], @@ -710,5 +702,5 @@ } ] }, - "generated_at": "2024-05-20T16:03:05Z" + "generated_at": "2024-06-11T22:26:18Z" } diff --git a/.github/actions/setup-project/action.yml b/.github/actions/setup-project/action.yml index b2d5829865..db1540fad5 100644 --- a/.github/actions/setup-project/action.yml +++ b/.github/actions/setup-project/action.yml @@ -9,10 +9,10 @@ runs: sudo apt-get update \ && sudo apt-get install -y --no-install-recommends \ libcurl4-openssl-dev - - name: Set up Python 3.12 + - name: Set up Python 3.12.3 uses: actions/setup-python@v4 with: - python-version: "3.12" + python-version: "3.12.3" - name: Install poetry shell: bash run: pip install poetry diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..d4d9a1328f --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,95 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "main", "production" ] + pull_request: + branches: [ "main", "production" ] + schedule: + - cron: '18 5 * * 3' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. 
+ runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} + permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: javascript-typescript + build-mode: none + - language: python + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. + # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. 
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
    - if: matrix.build-mode == 'manual'
      shell: bash
      run: |
        echo 'If you are using a "manual" build mode for one or more of the' \
          'languages you are analyzing, replace this with the commands to build' \
          'your code, for example:'
        echo '  make bootstrap'
        echo '  make release'
        exit 1

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
      with:
        category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/deploy-demo.yml b/.github/workflows/deploy-demo.yml
index dc725f1573..a8adfa9188 100644
--- a/.github/workflows/deploy-demo.yml
+++ b/.github/workflows/deploy-demo.yml
@@ -18,7 +18,7 @@ jobs:
      - name: Check for changes to Terraform
        id: changed-terraform-files
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            terraform/demo
@@ -88,7 +88,7 @@ jobs:
      - name: Check for changes to egress config
        id: changed-egress-config
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            deploy-config/egress_proxy/notify-admin-demo.*.acl
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
index d614bf3091..940e11faf5 100644
--- a/.github/workflows/deploy-prod.yml
+++ b/.github/workflows/deploy-prod.yml
@@ -18,7 +18,7 @@ jobs:
      - name: Check for changes to Terraform
        id: changed-terraform-files
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            terraform/production
@@ -88,7 +88,7 @@ jobs:
      - name: Check for changes to egress config
        id: changed-egress-config
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            deploy-config/egress_proxy/notify-admin-production.*.acl
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index d74ba3133d..f3b6133a4c 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -23,7 +23,7 @@ jobs:
      - name: Check for changes to Terraform
        id: changed-terraform-files
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            terraform/staging
@@ -95,7 +95,7 @@ jobs:
      - name: Check for changes to egress config
        id: changed-egress-config
-       uses: tj-actions/changed-files@v41.0.0
+       uses: tj-actions/changed-files@v44
        with:
          files: |
            deploy-config/egress_proxy/notify-admin-staging.*.acl
diff --git a/.nvmrc b/.nvmrc
index d9289897d3..cb406c60ce 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-16.15.1
+16.20.2
diff --git a/Makefile b/Makefile
index eb6bb81316..90e8e76de5 100644
--- a/Makefile
+++ b/Makefile
@@ -21,7 +21,8 @@ bootstrap: generate-version-file ## Set up everything to run the app
	poetry install --sync --no-root
	poetry run playwright install --with-deps
	poetry run pre-commit install
-	source $(NVMSH) --no-use && nvm install && npm ci --no-audit
+	source $(NVMSH) --no-use && nvm install && npm install
+	source $(NVMSH) && npm ci --no-audit
	source $(NVMSH) && npm run build

.PHONY: watch-frontend
@@ -40,6 +41,10 @@ run-flask-bare: ## Run flask without invoking poetry so we can override ENV var
npm-audit: ## Check for vulnerabilities in NPM packages
	source $(NVMSH) && npm run audit

+.PHONY: npm-audit-fix
+npm-audit-fix: ## Fix vulnerabilities that do not require attention (according to npm)
+	source $(NVMSH) && npm audit fix
+
.PHONY: help
help:
	@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n",
$$1, $$2}' @@ -76,8 +81,9 @@ py-test: ## Run python unit tests poetry run coverage html -d .coverage_cache .PHONY: dead-code -dead-code: - poetry run vulture ./app --min-confidence=100 +dead-code: ## 60% is our aspirational goal, but currently breaks the build + poetry run vulture ./app ./notifications_utils --min-confidence=100 + .PHONY: e2e-test e2e-test: export NEW_RELIC_ENVIRONMENT=test diff --git a/README.md b/README.md index 1c4df2aa5e..15a86ae009 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ You will need the following items: [Follow the instructions here to set up the Notify.gov API.](https://github.com/GSA/notifications-api#before-you-start) The Notify.gov API is required in order for the Notify.gov Admin UI to run, and -it will also take care of many of the steps that are listed here. The sections +it will also take care of many of the steps that are listed here. The sections that are a repeat from the API setup are flagged with an **[API Step]** label in front of them. @@ -83,11 +83,13 @@ Your system `$PATH` environment variable is likely set in one of these locations: For BASH shells: + - `~/.bashrc` - `~/.bash_profile` - `~/.profile` For ZSH shells: + - `~/.zshrc` - `~/.zprofile` @@ -97,7 +99,7 @@ environments. Which file you need to modify depends on whether or not you are running an interactive shell or a login shell (see [this Stack Overflow post](https://stackoverflow.com/questions/18186929/what-are-the-differences-between-a-login-shell-and-interactive-shell) -for an explanation of the differences). If you're still not sure, please ask +for an explanation of the differences). If you're still not sure, please ask the team for help! Once you determine which file you'll need to modify, add these lines before any @@ -158,7 +160,7 @@ _NOTE: This project currently uses the latest `1.4.x release of Terraform._ #### [API Step] Python Installation Now we're going to install a tool to help us manage Python versions and -virtual environments on our system. First, we'll install +virtual environments on our system. First, we'll install [pyenv](https://github.com/pyenv/pyenv) and one of its plugins, [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv), with Homebrew: @@ -285,7 +287,7 @@ we'll use `3.12` in our example here since we recently upgraded to this version: pyenv install 3.12 ``` -Next, delete the virtual environment you previously had set up. If you followed +Next, delete the virtual environment you previously had set up. If you followed the instructions above with the first-time set up, you can do this with `pyenv`: ```sh @@ -306,6 +308,20 @@ you'll be set with an upgraded version of Python. _If you're not sure about the details of your current virtual environment, you can run `poetry env info` to get more information. If you've been using `pyenv` for everything, you can also see all available virtual environments with `pyenv virtualenvs`._ +#### Updating the .env file for Login.gov + +To configure the application for Login.gov, you will need to update the following environment variables in the .env file: + +``` +COMMIT_HASH=”--------” +``` + +Reach out to someone on the team to get the most recent Login.gov key. + +``` +LOGIN_PEM="INSERT_LOGIN_GOV_KEY_HERE" +``` + #### Updating the .env file for E2E tests With the newly created `.env` file in place, you'll need to make one more @@ -353,7 +369,7 @@ API is running as well! 
## Creating a 'First User' in the database After you have completed all setup steps, you will be unable to log in, because there -will not be a user in the database to link to the login.gov account you are using. So +will not be a user in the database to link to the login.gov account you are using. So you will need to create that user in your database using the 'create-test-user' command. Open two terminals pointing to the api project and then run these commands in the @@ -372,8 +388,6 @@ is the same one you are using in login.gov and make sure your phone number is in If for any reason in the course of development it is necessary for your to delete your db via the `dropdb` command, you will need to repeat these steps when you recreate your db. - - ## Git Hooks We're using [`pre-commit`](https://pre-commit.com/) to manage hooks in order to diff --git a/app/__init__.py b/app/__init__.py index 5c9283c79b..76f3664cdd 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -18,6 +18,7 @@ ) from flask.globals import request_ctx from flask_login import LoginManager, current_user +from flask_socketio import SocketIO from flask_talisman import Talisman from flask_wtf import CSRFProtect from flask_wtf.csrf import CSRFError @@ -30,7 +31,7 @@ from app import proxy_fix from app.asset_fingerprinter import asset_fingerprinter from app.config import configs -from app.extensions import redis_client, zendesk_client +from app.extensions import redis_client from app.formatters import ( convert_markdown_template, convert_to_boolean, @@ -118,6 +119,7 @@ login_manager = LoginManager() csrf = CSRFProtect() talisman = Talisman() +socketio = SocketIO() # The current service attached to the request stack. @@ -175,6 +177,7 @@ def create_app(application): init_govuk_frontend(application) init_jinja(application) + socketio.init_app(application) for client in ( csrf, @@ -202,7 +205,6 @@ def create_app(application): user_api_client, # External API clients redis_client, - zendesk_client, ): client.init_app(application) @@ -231,6 +233,24 @@ def create_app(application): ) logging.init_app(application) + # Hopefully will help identify if there is a race condition causing the CSRF errors + # that we have occasionally seen in our environments. 
+ for key in ("SECRET_KEY", "DANGEROUS_SALT"): + try: + value = application.config[key] + except KeyError: + application.logger.error(f"Env Var {key} doesn't exist.") + else: + try: + data_len = len(value.strip()) + except (TypeError, AttributeError): + application.logger.error(f"Env Var {key} invalid type: {type(value)}") + else: + if data_len: + application.logger.info(f"Env Var {key} is a non-zero length.") + else: + application.logger.error(f"Env Var {key} is empty.") + login_manager.login_view = "main.sign_in" login_manager.login_message_category = "default" login_manager.session_protection = None diff --git a/app/assets/images/logo-login.svg b/app/assets/images/logo-login.svg new file mode 100644 index 0000000000..0197133775 --- /dev/null +++ b/app/assets/images/logo-login.svg @@ -0,0 +1 @@ + diff --git a/app/assets/javascripts/chartDashboard.js b/app/assets/javascripts/chartDashboard.js deleted file mode 100644 index fc40936f89..0000000000 --- a/app/assets/javascripts/chartDashboard.js +++ /dev/null @@ -1,24 +0,0 @@ -(function (window) { - - const ctx = document.getElementById('myChart'); - - new Chart(ctx, { - type: 'bar', - data: { - labels: ['Red', 'Blue', 'Yellow', 'Green', 'Purple', 'Orange'], - datasets: [{ - label: '# of Votes', - data: [12, 19, 3, 5, 2, 3], - borderWidth: 1 - }] - }, - options: { - scales: { - y: { - beginAtZero: true - } - } - } - }); - -})(window); diff --git a/app/assets/javascripts/sampleChartDashboard.js b/app/assets/javascripts/sampleChartDashboard.js new file mode 100644 index 0000000000..d191abef14 --- /dev/null +++ b/app/assets/javascripts/sampleChartDashboard.js @@ -0,0 +1,66 @@ +(function (window) { + + function initializeChartAndSocket() { + var ctx = document.getElementById('myChart'); + if (!ctx) { + return; + } + + var myBarChart = new Chart(ctx.getContext('2d'), { + type: 'bar', + data: { + labels: [], + datasets: [ + { + label: 'Delivered', + data: [], + backgroundColor: '#0076d6', + stack: 'Stack 0' + }, + ] + }, + options: { + scales: { + y: { + beginAtZero: true + } + } + } + }); + + var socket = io(); + var serviceId = ctx.getAttribute('data-service-id'); + + socket.on('connect', function() { + socket.emit('fetch_daily_stats', serviceId); + }); + + socket.on('daily_stats_update', function(data) { + var labels = []; + var deliveredData = []; + + for (var date in data) { + labels.push(date); + deliveredData.push(data[date].sms.delivered); + } + + myBarChart.data.labels = labels; + myBarChart.data.datasets[0].data = deliveredData; + myBarChart.update(); + }); + + socket.on('error', function(data) { + console.log('Error:', data); + }); + + var sevenDaysButton = document.getElementById('sevenDaysButton'); + if (sevenDaysButton) { + sevenDaysButton.addEventListener('click', function() { + socket.emit('fetch_daily_stats', serviceId); + }); + } + } + + document.addEventListener('DOMContentLoaded', initializeChartAndSocket); + +})(window); diff --git a/app/assets/sass/uswds/_uswds-theme-custom-styles.scss b/app/assets/sass/uswds/_uswds-theme-custom-styles.scss index d66e276bdb..efe86c7635 100644 --- a/app/assets/sass/uswds/_uswds-theme-custom-styles.scss +++ b/app/assets/sass/uswds/_uswds-theme-custom-styles.scss @@ -157,6 +157,18 @@ td.table-empty-message { } } +.usa-button img { + margin-left: .5rem; + height: 1rem; +} + +.usa-button.login-button.login-button--primary,.login-button.login-button--primary:hover{ + color:#112e51;background-color:#fff; + border:1px solid #767676; + display: inline-flex; + justify-content: center; +} + 
.user-list-edit-link:active:before, .user-list-edit-link:focus:before { box-shadow: none; diff --git a/app/config.py b/app/config.py index 8ec4db1866..960d6331b0 100644 --- a/app/config.py +++ b/app/config.py @@ -38,7 +38,7 @@ class Config(object): NR_MONITOR_ON = settings and settings.monitor_mode COMMIT_HASH = getenv("COMMIT_HASH", "--------")[0:7] - GOVERNMENT_EMAIL_DOMAIN_NAMES = ["gov"] + GOVERNMENT_EMAIL_DOMAIN_NAMES = ["gov", "mil", "si.edu"] # Logging NOTIFY_LOG_LEVEL = getenv("NOTIFY_LOG_LEVEL", "INFO") @@ -53,7 +53,13 @@ class Config(object): PERMANENT_SESSION_LIFETIME = 1800 # 30 Minutes SEND_FILE_MAX_AGE_DEFAULT = 365 * 24 * 60 * 60 # 1 year REPLY_TO_EMAIL_ADDRESS_VALIDATION_TIMEOUT = 45 - ACTIVITY_STATS_LIMIT_DAYS = 7 + ACTIVITY_STATS_LIMIT_DAYS = { + "today": 0, + "one_day": 1, + "three_day": 3, + "five_day": 5, + "seven_day": 7, + } SESSION_COOKIE_HTTPONLY = True SESSION_COOKIE_NAME = "notify_admin_session" SESSION_COOKIE_SECURE = True diff --git a/app/extensions.py b/app/extensions.py index 8bbb874a3c..e322e46d06 100644 --- a/app/extensions.py +++ b/app/extensions.py @@ -1,5 +1,3 @@ from notifications_utils.clients.redis.redis_client import RedisClient -from notifications_utils.clients.zendesk.zendesk_client import ZendeskClient -zendesk_client = ZendeskClient() redis_client = RedisClient() diff --git a/app/main/views/dashboard.py b/app/main/views/dashboard.py index 3bbf432b31..a64444dc23 100644 --- a/app/main/views/dashboard.py +++ b/app/main/views/dashboard.py @@ -6,6 +6,7 @@ from flask import Response, abort, jsonify, render_template, request, session, url_for from flask_login import current_user +from flask_socketio import emit from werkzeug.utils import redirect from app import ( @@ -14,6 +15,7 @@ job_api_client, notification_api_client, service_api_client, + socketio, template_statistics_client, ) from app.formatters import format_date_numeric, format_datetime_numeric, get_time_left @@ -32,6 +34,18 @@ from notifications_utils.recipients import format_phone_number_human_readable +@socketio.on("fetch_daily_stats") +def handle_fetch_daily_stats(service_id): + if service_id: + date_range = get_stats_date_range() + daily_stats = service_api_client.get_service_notification_statistics_by_day( + service_id, start_date=date_range["start_date"], days=date_range["days"] + ) + emit("daily_stats_update", daily_stats) + else: + emit("error", {"error": "No service_id provided"}) + + @main.route("/services//dashboard") @user_has_permissions("view_activity", "send_messages") def old_service_dashboard(service_id): @@ -84,6 +98,7 @@ def service_dashboard(service_id): partials=get_dashboard_partials(service_id), job_and_notifications=job_and_notifications, service_data_retention_days=service_data_retention_days, + service_id=service_id, ) @@ -434,6 +449,24 @@ def get_months_for_financial_year(year, time_format="%B"): return [month.strftime(time_format) for month in (get_months_for_year(1, 13, year))] +def get_current_month_for_financial_year(year): + current_month = datetime.now().month + return current_month + + +def get_stats_date_range(): + current_financial_year = get_current_financial_year() + current_month = get_current_month_for_financial_year(current_financial_year) + start_date = datetime.now().strftime("%Y-%m-%d") + days = 7 + return { + "current_financial_year": current_financial_year, + "current_month": current_month, + "start_date": start_date, + "days": days, + } + + def get_months_for_year(start, end, year): return [datetime(year, month, 1) for month in range(start, end)] 
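Reviewer note: the dashboard.py hunk above wires the new "fetch_daily_stats" Socket.IO event to the statistics client. Below is a minimal, self-contained sketch of that round trip (not part of the diff) using flask_socketio's test client; the service id and the statistics payload are placeholders, and the payload shape is an assumption taken from sampleChartDashboard.js, which reads data[date].sms.delivered.

```python
# Illustrative sketch only: a stand-alone Flask app with a handler shaped like
# handle_fetch_daily_stats(), exercised with flask_socketio's test client.
from flask import Flask
from flask_socketio import SocketIO, emit

app = Flask(__name__)
socketio = SocketIO(app)

# Stand-in for service_api_client.get_service_notification_statistics_by_day();
# dates and counts are made up, but the nesting matches what the chart JS reads.
FAKE_DAILY_STATS = {
    "2024-06-10": {"sms": {"delivered": 5, "failure": 0, "requested": 5}},
    "2024-06-11": {"sms": {"delivered": 2, "failure": 1, "requested": 3}},
}


@socketio.on("fetch_daily_stats")
def handle_fetch_daily_stats(service_id):
    if service_id:
        emit("daily_stats_update", FAKE_DAILY_STATS)
    else:
        emit("error", {"error": "No service_id provided"})


if __name__ == "__main__":
    client = socketio.test_client(app)
    client.emit("fetch_daily_stats", "11111111-1111-1111-1111-111111111111")
    received = client.get_received()
    assert received[0]["name"] == "daily_stats_update"
    assert received[0]["args"][0]["2024-06-11"]["sms"]["delivered"] == 2
```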
diff --git a/app/main/views/index.py b/app/main/views/index.py index c68605b2ef..ec489d5ac9 100644 --- a/app/main/views/index.py +++ b/app/main/views/index.py @@ -1,6 +1,6 @@ import os -from flask import abort, redirect, render_template, request, url_for +from flask import abort, current_app, redirect, render_template, request, url_for from flask_login import current_user from app import status_api_client @@ -9,20 +9,28 @@ from app.main.views.pricing import CURRENT_SMS_RATE from app.main.views.sub_navigation_dictionaries import features_nav, using_notify_nav from app.utils.user import user_is_logged_in - -login_dot_gov_url = os.getenv("LOGIN_DOT_GOV_INITIAL_SIGNIN_URL") +from notifications_utils.url_safe_token import generate_token @main.route("/") def index(): if current_user and current_user.is_authenticated: return redirect(url_for("main.choose_account")) - + token = generate_token( + str(request.remote_addr), + current_app.config["SECRET_KEY"], + current_app.config["DANGEROUS_SALT"], + ) + url = os.getenv("LOGIN_DOT_GOV_INITIAL_SIGNIN_URL") + # handle unit tests + if url is not None: + url = url.replace("NONCE", token) + url = url.replace("STATE", token) return render_template( "views/signedout.html", sms_rate=CURRENT_SMS_RATE, counts=status_api_client.get_count_of_live_services_and_organizations(), - login_dot_gov_url=login_dot_gov_url, + initial_signin_url=url, ) diff --git a/app/main/views/jobs.py b/app/main/views/jobs.py index 010d19b264..42a4de0905 100644 --- a/app/main/views/jobs.py +++ b/app/main/views/jobs.py @@ -143,11 +143,40 @@ def view_notifications(service_id, message_type=None): True: ["reference"], False: [], }.get(bool(current_service.api_keys)), - download_link=url_for( + download_link_one_day=url_for( ".download_notifications_csv", service_id=current_service.id, message_type=message_type, status=request.args.get("status"), + number_of_days="one_day", + ), + download_link_today=url_for( + ".download_notifications_csv", + service_id=current_service.id, + message_type=message_type, + status=request.args.get("status"), + number_of_days="today", + ), + download_link_three_day=url_for( + ".download_notifications_csv", + service_id=current_service.id, + message_type=message_type, + status=request.args.get("status"), + number_of_days="three_day", + ), + download_link_five_day=url_for( + ".download_notifications_csv", + service_id=current_service.id, + message_type=message_type, + status=request.args.get("status"), + number_of_days="five_day", + ), + download_link_seven_day=url_for( + ".download_notifications_csv", + service_id=current_service.id, + message_type=message_type, + status=request.args.get("status"), + number_of_days="seven_day", ), ) @@ -183,10 +212,9 @@ def get_notifications(service_id, message_type, status_override=None): # noqa filter_args["status"] = set_status_filters(filter_args) service_data_retention_days = None search_term = request.form.get("to", "") - if message_type is not None: service_data_retention_days = current_service.get_days_of_retention( - message_type + message_type, number_of_days="seven_day" ) if request.path.endswith("csv") and current_user.has_permissions("view_activity"): @@ -212,7 +240,6 @@ def get_notifications(service_id, message_type, status_override=None): # noqa ) url_args = {"message_type": message_type, "status": request.args.get("status")} prev_page = None - if "links" in notifications and notifications["links"].get("prev", None): prev_page = generate_previous_dict( "main.view_notifications", service_id, page, url_args=url_args 
@@ -233,7 +260,6 @@ def get_notifications(service_id, message_type, status_override=None): # noqa ) else: download_link = None - return { "service_data_retention_days": service_data_retention_days, "counts": render_template( @@ -362,6 +388,7 @@ def get_job_partials(job): filter_args = parse_filter_args(request.args) filter_args["status"] = set_status_filters(filter_args) notifications = job.get_notifications(status=filter_args["status"]) + number_of_days = "seven_day" counts = render_template( "partials/count.html", counts=_get_job_counts(job), @@ -371,7 +398,7 @@ def get_job_partials(job): ), ) service_data_retention_days = current_service.get_days_of_retention( - job.template_type + job.template_type, number_of_days ) if request.referrer is not None: diff --git a/app/main/views/notifications.py b/app/main/views/notifications.py index ac05e05ffe..e41708b8c2 100644 --- a/app/main/views/notifications.py +++ b/app/main/views/notifications.py @@ -137,9 +137,9 @@ def get_all_personalisation_from_notification(notification): def download_notifications_csv(service_id): filter_args = parse_filter_args(request.args) filter_args["status"] = set_status_filters(filter_args) - + number_of_days = request.args["number_of_days"] service_data_retention_days = current_service.get_days_of_retention( - filter_args.get("message_type")[0] + filter_args.get("message_type")[0], number_of_days ) file_time = datetime.now().strftime("%Y-%m-%d %I:%M:%S %p") file_time = f"{file_time} {get_user_preferred_timezone()}" diff --git a/app/main/views/register.py b/app/main/views/register.py index 2829d37bb3..761582b3d9 100644 --- a/app/main/views/register.py +++ b/app/main/views/register.py @@ -26,6 +26,7 @@ from app.main.views.verify import activate_user from app.models.user import InvitedOrgUser, InvitedUser, User from app.utils import hide_from_search_engines, hilite +from app.utils.user import is_gov_user @main.route("/register", methods=["GET", "POST"]) @@ -115,10 +116,10 @@ def registration_continue(): def get_invite_data_from_redis(state): - invite_data = json.loads(redis_client.raw_get(f"invitedata-{state}")) - user_email = redis_client.raw_get(f"user_email-{state}").decode("utf8") - user_uuid = redis_client.raw_get(f"user_uuid-{state}").decode("utf8") - invited_user_email_address = redis_client.raw_get( + invite_data = json.loads(redis_client.get(f"invitedata-{state}")) + user_email = redis_client.get(f"user_email-{state}").decode("utf8") + user_uuid = redis_client.get(f"user_uuid-{state}").decode("utf8") + invited_user_email_address = redis_client.get( f"invited_user_email_address-{state}" ).decode("utf8") return invite_data, user_email, user_uuid, invited_user_email_address @@ -129,10 +130,10 @@ def put_invite_data_in_redis( ): ttl = 60 * 15 # 15 minutes - redis_client.raw_set(f"invitedata-{state}", json.dumps(invite_data), ex=ttl) - redis_client.raw_set(f"user_email-{state}", user_email, ex=ttl) - redis_client.raw_set(f"user_uuid-{state}", user_uuid, ex=ttl) - redis_client.raw_set( + redis_client.set(f"invitedata-{state}", json.dumps(invite_data), ex=ttl) + redis_client.set(f"user_email-{state}", user_email, ex=ttl) + redis_client.set(f"user_uuid-{state}", user_uuid, ex=ttl) + redis_client.set( f"invited_user_email_address-{state}", invited_user_email_address, ex=ttl, @@ -147,6 +148,11 @@ def check_invited_user_email_address_matches_expected( flash("You cannot accept an invite for another person.") abort(403) + if not is_gov_user(user_email): + debug_msg("invited user has a non-government email address.") + 
flash("You must use a government email address.") + abort(403) + @main.route("/set-up-your-profile", methods=["GET", "POST"]) @hide_from_search_engines @@ -157,7 +163,7 @@ def set_up_your_profile(): state = request.args.get("state") login_gov_error = request.args.get("error") - if redis_client.raw_get(f"invitedata-{state}") is None: + if redis_client.get(f"invitedata-{state}") is None: access_token = sign_in._get_access_token(code, state) debug_msg("Got the access token for login.gov") user_email, user_uuid = sign_in._get_user_email_and_uuid(access_token) @@ -189,7 +195,7 @@ def set_up_your_profile(): if ( form.validate_on_submit() - and redis_client.raw_get(f"invitedata-{state}") is not None + and redis_client.get(f"invitedata-{state}") is not None ): invite_data, user_email, user_uuid, invited_user_email_address = ( get_invite_data_from_redis(state) @@ -245,10 +251,21 @@ def get_invited_user_email_address(invited_user_id): def invited_user_accept_invite(invited_user_id): invited_user = InvitedUser.by_id(invited_user_id) + if invited_user.status == "expired": current_app.logger.error("User invitation has expired") - flash("Your invitation has expired.") + flash( + "Your invitation has expired; please contact the person who invited you for additional help." + ) abort(401) + + if invited_user.status == "cancelled": + current_app.logger.error("User invitation has been cancelled") + flash( + "Your invitation is no longer valid; please contact the person who invited you for additional help." + ) + abort(401) + invited_user.accept_invite() diff --git a/app/main/views/send.py b/app/main/views/send.py index abf17fded8..bb63f10a94 100644 --- a/app/main/views/send.py +++ b/app/main/views/send.py @@ -3,7 +3,16 @@ from string import ascii_uppercase from zipfile import BadZipFile -from flask import abort, flash, redirect, render_template, request, session, url_for +from flask import ( + abort, + current_app, + flash, + redirect, + render_template, + request, + session, + url_for, +) from flask_login import current_user from markupsafe import Markup from notifications_python_client.errors import HTTPError @@ -31,7 +40,12 @@ s3upload, set_metadata_on_csv_upload, ) -from app.utils import PermanentRedirect, should_skip_template_page, unicode_truncate +from app.utils import ( + PermanentRedirect, + hilite, + should_skip_template_page, + unicode_truncate, +) from app.utils.csv import Spreadsheet, get_errors_for_csv from app.utils.templates import get_template from app.utils.user import user_has_permissions @@ -948,9 +962,17 @@ def send_notification(service_id, template_id): vals = ",".join(values) data = f"{data}\r\n{vals}" - filename = f"one-off-{current_user.name}-{uuid.uuid4()}.csv" + filename = ( + f"one-off-{uuid.uuid4()}.csv" # {current_user.name} removed from filename + ) my_data = {"filename": filename, "template_id": template_id, "data": data} upload_id = s3upload(service_id, my_data) + + # To debug messages that the user reports have not been sent, we log + # the csv filename and the job id. The user will give us the file name, + # so we can search on that to obtain the job id, which we can use elsewhere + # on the API side to find out what happens to the message. 
+ current_app.logger.info(hilite(f"One-off file: {filename} job_id: {upload_id}")) form = CsvUploadForm() form.file.data = my_data form.file.name = filename @@ -1000,7 +1022,12 @@ def send_notification(service_id, template_id): job_id=upload_id, ) ) - + total = notifications["total"] + current_app.logger.info( + hilite( + f"job_id: {upload_id} has notifications: {total} and attempts: {attempts}" + ) + ) return redirect( url_for( ".view_job", diff --git a/app/main/views/sign_in.py b/app/main/views/sign_in.py index d00d368921..9f089fe42c 100644 --- a/app/main/views/sign_in.py +++ b/app/main/views/sign_in.py @@ -4,7 +4,16 @@ import jwt import requests -from flask import Response, current_app, redirect, render_template, request, url_for +from flask import ( + Response, + abort, + current_app, + flash, + redirect, + render_template, + request, + url_for, +) from flask_login import current_user from app import login_manager, user_api_client @@ -15,6 +24,7 @@ from app.utils import hide_from_search_engines from app.utils.login import is_safe_redirect_url from app.utils.time import is_less_than_days_ago +from app.utils.user import is_gov_user from notifications_utils.url_safe_token import generate_token @@ -88,6 +98,12 @@ def _do_login_dot_gov(): try: access_token = _get_access_token(code, state) user_email, user_uuid = _get_user_email_and_uuid(access_token) + if not is_gov_user(user_email): + current_app.logger.error( + "invited user has a non-government email address." + ) + flash("You must use a government email address.") + abort(403) redirect_url = request.args.get("next") user = user_api_client.get_user_by_uuid_or_email(user_uuid, user_email) diff --git a/app/main/views/sub_navigation_dictionaries.py b/app/main/views/sub_navigation_dictionaries.py index 5e32bc003b..5c7cf26bac 100644 --- a/app/main/views/sub_navigation_dictionaries.py +++ b/app/main/views/sub_navigation_dictionaries.py @@ -32,7 +32,7 @@ def using_notify_nav(): "link": "main.trial_mode_new", }, { - "name": "Pricing", + "name": "Tracking usage", "link": "main.pricing", }, { diff --git a/app/main/views/verify.py b/app/main/views/verify.py index 4b3e2aad13..f6dec6fa2e 100644 --- a/app/main/views/verify.py +++ b/app/main/views/verify.py @@ -66,7 +66,7 @@ def activate_user(user_id): user = User.from_id(user_id) # TODO add org invites back in the new way - # organization_id = redis_client.raw_get( + # organization_id = redis_client.get( # f"organization-invite-{user.email_address}" # ) # user_api_client.add_user_to_organization( diff --git a/app/models/service.py b/app/models/service.py index 375047d8d4..e9bcf8a7da 100644 --- a/app/models/service.py +++ b/app/models/service.py @@ -390,7 +390,7 @@ def data_retention(self): def get_data_retention_item(self, id): return next((dr for dr in self.data_retention if dr["id"] == id), None) - def get_days_of_retention(self, notification_type): + def get_days_of_retention(self, notification_type, number_of_days): return next( ( dr @@ -398,7 +398,10 @@ def get_days_of_retention(self, notification_type): if dr["notification_type"] == notification_type ), {}, - ).get("days_of_retention", current_app.config["ACTIVITY_STATS_LIMIT_DAYS"]) + ).get( + "days_of_retention", + current_app.config["ACTIVITY_STATS_LIMIT_DAYS"].get(number_of_days), + ) @cached_property def organization(self): diff --git a/app/notify_client/service_api_client.py b/app/notify_client/service_api_client.py index d34516b8bd..42f54572fa 100644 --- a/app/notify_client/service_api_client.py +++ 
b/app/notify_client/service_api_client.py @@ -43,6 +43,16 @@ def get_service_statistics(self, service_id, limit_days=None): params={"limit_days": limit_days}, )["data"] + def get_service_notification_statistics_by_day( + self, service_id, start_date=None, days=None + ): + if start_date is None: + start_date = datetime.now().strftime("%Y-%m-%d") + + return self.get( + "/service/{0}/statistics/{1}/{2}".format(service_id, start_date, days), + )["data"] + def get_services(self, params_dict=None): """ Retrieve a list of services. diff --git a/app/s3_client/__init__.py b/app/s3_client/__init__.py index e0933b4644..7de3509d2f 100644 --- a/app/s3_client/__init__.py +++ b/app/s3_client/__init__.py @@ -1,3 +1,5 @@ +import os + import botocore from boto3 import Session from botocore.config import Config @@ -29,6 +31,17 @@ def get_s3_object( ) s3 = session.resource("s3", config=AWS_CLIENT_CONFIG) obj = s3.Object(bucket_name, filename) + # This 'proves' that use of moto in the relevant tests in test_send.py + # mocks everything related to S3. What you will see in the logs is: + # Exception: CREATED AT + # + # raise Exception(f"CREATED AT {_s3.Bucket(bucket_name).creation_date}") + if os.getenv("NOTIFY_ENVIRONMENT") == "test": + teststr = str(s3.Bucket(bucket_name).creation_date).lower() + if "magicmock" not in teststr: + raise Exception( + "Test is not mocked, use @mock_aws or the relevant mocker.patch to avoid accessing S3" + ) return obj diff --git a/app/s3_client/s3_csv_client.py b/app/s3_client/s3_csv_client.py index 21c3298873..752f054a45 100644 --- a/app/s3_client/s3_csv_client.py +++ b/app/s3_client/s3_csv_client.py @@ -28,6 +28,7 @@ def get_csv_upload(service_id, upload_id): def s3upload(service_id, filedata): + upload_id = str(uuid.uuid4()) bucket_name, file_location, access_key, secret_key, region = get_csv_location( service_id, upload_id diff --git a/app/templates/base.html b/app/templates/base.html index 8a6ba1ea8b..4a6421afbf 100644 --- a/app/templates/base.html +++ b/app/templates/base.html @@ -47,7 +47,7 @@ {% endblock %} {% block content %} {% block flash_messages %} - {% include 'flash_messages.html' %} + {% include 'new/components/flash_messages.html' %} {% endblock %} {% block maincolumn_content %}
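Reviewer note on the guard added to app/s3_client/__init__.py above: it only passes when boto3 is mocked out, because str(s3.Bucket(...).creation_date) then contains "MagicMock". A hedged sketch of one way a test could satisfy it with pytest-mock follows; the get_s3_object argument list is an assumption based on get_csv_location's return values, not something this diff confirms.

```python
# Illustrative test sketch (assumed signature and placeholder values).
import os
from unittest import mock


def test_get_s3_object_never_touches_real_s3(mocker):  # pytest-mock fixture
    os.environ["NOTIFY_ENVIRONMENT"] = "test"
    # Patch Session where app.s3_client imported it, so everything the helper
    # touches is a MagicMock and the "magicmock" check in the guard passes.
    mocker.patch("app.s3_client.Session")

    from app.s3_client import get_s3_object

    obj = get_s3_object("bucket", "file.csv", "access-key", "secret-key", "us-gov-west-1")
    assert isinstance(obj, mock.MagicMock)
```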
diff --git a/app/templates/flash_messages.html b/app/templates/new/components/flash_messages.html similarity index 100% rename from app/templates/flash_messages.html rename to app/templates/new/components/flash_messages.html diff --git a/app/templates/new/components/head.html b/app/templates/new/components/head.html index 51f3c4da33..dd7519cf4b 100644 --- a/app/templates/new/components/head.html +++ b/app/templates/new/components/head.html @@ -31,7 +31,6 @@ {# google #} - {% if g.hide_from_search_engines %} diff --git a/app/templates/new/components/service_navigation.html b/app/templates/new/components/service_nav.html similarity index 100% rename from app/templates/new/components/service_navigation.html rename to app/templates/new/components/service_nav.html diff --git a/app/templates/settings_nav.html b/app/templates/new/components/settings_nav.html similarity index 100% rename from app/templates/settings_nav.html rename to app/templates/new/components/settings_nav.html diff --git a/app/templates/new/layouts/withnav_template.html b/app/templates/new/layouts/withnav_template.html deleted file mode 100644 index 30d38b4bc6..0000000000 --- a/app/templates/new/layouts/withnav_template.html +++ /dev/null @@ -1,57 +0,0 @@ -{% extends "/new/base.html" %} - -{% block per_page_title %} - {% block service_page_title %}{% endblock %}{% if current_service.name %} – {{ current_service.name }}{% endif %} - {% block org_page_title %}{% endblock %}{% if current_org.name %} – {{ current_org.name }}{% endif %} -{% endblock %} - -{% block main %} -
- {% block serviceNavigation %} - {% if current_org.name %} - {% else %} - {% include "new/components/service_navigation.html" %} - {% endif %} - {% endblock %} - {# - The withnav_template can serve as a replacement for both settings_template and org_template.html. - - The file service_navigation.html is included only in withnav_template. It's not used in settings_template. That is one out of the two differences between settings template and withnav template. As a result, when other templates extend settings_template, they include the serviceNavigation block but keep it empty. The settings_template.html is specifically used for these pages in the app: manage-users.html, service-settings.html, and user-profile.html. - - In addition, serviceNavigation should be empty on templates that previously extended org_template. For templates that previously extended org_template.html, there's an addition of the orgNavBreadcrumb block. - {% block orgNavBreadcrumb %} - {% include "/new/components/org_nav_breadcrumb.html" %} - {% endblock %} - #} - {% if current_org.name %} - {% block orgNavBreadcrumb %}{% include "/new/components/org_nav_breadcrumb.html" %}{% endblock %} - {% endif %} -
-
- {% block sideNavigation %} - {% if org_navigation_links %} - {% include "/new/components/org_nav.html" %} - {% else %} - {% include "/new/components/main_nav.html" %} - {% endif %} - {# - Include settings_nav.html for child templates that previously extended settings_template. - - Include "org_nav.html" for child templates that previously extended org_template html - #} - {% endblock %} -
-
- {% block beforeContent %} - {% block backLink %}{% endblock %} - {% endblock %} -
- {% block content %} - {% include 'flash_messages.html' %} - {% block maincolumn_content %}{% endblock %} - {% endblock %} -
-
-
-
-{% endblock %} diff --git a/app/templates/main_nav.html b/app/templates/old/main_nav.html similarity index 98% rename from app/templates/main_nav.html rename to app/templates/old/main_nav.html index 4aac354d9f..4357dc6548 100644 --- a/app/templates/main_nav.html +++ b/app/templates/old/main_nav.html @@ -1,3 +1,4 @@ +{# This template is an old version #} {% if help %} {% include 'partials/tour.html' %} {% else %} diff --git a/app/templates/org_nav.html b/app/templates/old/org_nav.html similarity index 96% rename from app/templates/org_nav.html rename to app/templates/old/org_nav.html index f7f7a5b93a..0511eb9ff0 100644 --- a/app/templates/org_nav.html +++ b/app/templates/old/org_nav.html @@ -1,3 +1,4 @@ +{# This template is an old version #}
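Reviewer note tying together the app/config.py and app/models/service.py hunks earlier in this diff: get_days_of_retention now falls back to the ACTIVITY_STATS_LIMIT_DAYS mapping, keyed by the same number_of_days strings ("today", "one_day", "three_day", "five_day", "seven_day") used by the new download links in jobs.py. The standalone sketch below mirrors that lookup with made-up data; it is an illustration, not the app's Service model.

```python
# Illustrative sketch of the new retention fallback (placeholder data).
ACTIVITY_STATS_LIMIT_DAYS = {
    "today": 0,
    "one_day": 1,
    "three_day": 3,
    "five_day": 5,
    "seven_day": 7,
}


def get_days_of_retention(data_retention, notification_type, number_of_days):
    # data_retention: rows like {"notification_type": "sms", "days_of_retention": 3}
    match = next(
        (dr for dr in data_retention if dr["notification_type"] == notification_type),
        {},
    )
    return match.get(
        "days_of_retention", ACTIVITY_STATS_LIMIT_DAYS.get(number_of_days)
    )


# An explicit retention row wins; otherwise the number_of_days key decides.
assert get_days_of_retention([{"notification_type": "sms", "days_of_retention": 3}], "sms", "today") == 3
assert get_days_of_retention([], "sms", "seven_day") == 7
assert get_days_of_retention([], "email", "three_day") == 3
```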