diff --git a/.gitconfig b/.gitconfig
index f70bcd581..d6c8c66b1 100644
--- a/.gitconfig
+++ b/.gitconfig
@@ -1,17 +1,21 @@
[secrets]
providers = git secrets --aws-provider
- patterns = (A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}
- patterns = (\"|')?(AWS|aws|Aws)?_?(SECRET|secret|Secret)?_?(ACCESS|access|Access)?_?(KEY|key|Key)(\"|')?\\s*(:|=>|=)\\s*(\"|')?[A-Za-z0-9/\\+=]{40}(\"|')?
- patterns = (\"|')?(AWS|aws|Aws)?_?(ACCOUNT|account|Account)_?(ID|id|Id)?(\"|')?\\s*(:|=>|=)\\s*(\"|')?[0-9]{4}\\-?[0-9]{4}\\-?[0-9]{4}(\"|')?
- patterns = .+_KEY=.+
allowed = [A-Z]+_KEY=..echo \".{S3_CREDENTIALS}\" [|] jq -r .+
allowed = ./tdrs-backend/.env.example:.*
allowed = ./tdrs-backend/docker-compose.yml:57:.*
- allowed = ./tdrs-backend/manifest.proxy.yml:*
+
+ allowed = ./tdrs-frontend/node_modules*
allowed = regexes.json:.*
allowed = ./scripts/copy-login-gov-keypair.sh:14:JWT_KEY=.*
allowed = scripts/deploy-backend.sh:.+:DJANGO_SECRET_KEY=..python -c .from secrets import token_urlsafe. print.token_urlsafe..*
allowed = .git/config:.*
allowed = .gitconfig:.*
- allowed = .*DJANGO_SECRET_KEY=.*
+	allowed = .*DJANGO_SECRET_KEY=.* # this is auto-generated in deployed environments
+ allowed = ./tdrs-backend/manifest.proxy.yml:*
allowed = ./tdrs-backend/plg/loki/manifest.yml:*
+ allowed = ./tdrs-backend/plg/deploy.sh:84
+ patterns = (A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}
+ patterns = (\"|')?(AWS|aws|Aws)?_?(SECRET|secret|Secret)?_?(ACCESS|access|Access)?_?(KEY|key|Key)(\"|')?\\s*(:|=>|=)\\s*(\"|')?[A-Za-z0-9/\\+=]{40}(\"|')?
+ patterns = (\"|')?(AWS|aws|Aws)?_?(ACCOUNT|account|Account)_?(ID|id|Id)?(\"|')?\\s*(:|=>|=)\\s*(\"|')?[0-9]{4}\\-?[0-9]{4}\\-?[0-9]{4}(\"|')?
+ patterns = .+_KEY=.+
+ patterns = .+smtp_auth_password: .[^{]+
diff --git a/.githooks/pre-commit b/.githooks/pre-commit
new file mode 100755
index 000000000..7da1e7bb0
--- /dev/null
+++ b/.githooks/pre-commit
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -e
+
+zsh ./scripts/git-secrets-check.sh local
diff --git a/.githooks/pre-push b/.githooks/pre-push
new file mode 100755
index 000000000..51e4e28ff
--- /dev/null
+++ b/.githooks/pre-push
@@ -0,0 +1,14 @@
+#!/bin/bash
+set -e
+
+if ! task frontend-lint 2>/dev/null; then
+    echo "Frontend lint failed"
+    exit 1
+fi
+
+if ! task backend-lint 2>/dev/null; then
+    echo "Backend lint failed"
+    exit 1
+fi
\ No newline at end of file
diff --git a/.github/workflows/build-backend.yml b/.github/workflows/build-backend.yml
deleted file mode 100644
index 26ef5c03e..000000000
--- a/.github/workflows/build-backend.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-###########################################################################
-# GitHub Action Workflow
-# On push to any branch, triggers the back end build and test pipeline
-# if the tdrs-backend has changed.
-#
-# Step 0: make changes on your branch to non-documentation files in
-# tdrs-backend and push changes to your remote branch
-#
-# Step 1: Makes a request to the V2 CircleCI API to initiate the project,
-# which will filter based upon build_and_test_backend
-# to run the workflow/jobs listed here:
-# build-and-test:[
-# test-backend,
-# test-e2e
-# ]
-#
-# Leverages the open source GitHub Action:
-# https://github.com/promiseofcake/circleci-trigger-action
-###########################################################################
-name: Build Only Backend When tdrs-backend/ Files Change
-on:
- push:
- paths: 'tdrs-backend/**'
- branches-ignore:
- - develop
- - main
- - master
-jobs:
- build_and_test_backend:
- runs-on: ubuntu-latest
- name: Build and Test Backend
- steps:
- - uses: actions/checkout@v2
- - name: Circle CI Deployment Trigger
- id: curl-circle-ci
- uses: promiseofcake/circleci-trigger-action@v1
- with:
- user-token: ${{ secrets.CIRCLE_CI_V2_TOKEN }}
- project-slug: ${{ github.repository }}
- branch: ${{ github.ref_name }}
- payload: '{
- "build_and_test_backend": true,
- "triggered": true
- }'
diff --git a/.github/workflows/build-frontend.yml b/.github/workflows/build-frontend.yml
deleted file mode 100644
index b9b60a914..000000000
--- a/.github/workflows/build-frontend.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-###########################################################################
-# GitHub Action Workflow
-# On push to any branch, triggers the front end build and test pipeline
-# if the tdrs-frontend has changed.
-#
-# Step 0: make changes on your branch to non-documentation files in
-# tdrs-frontend and push changes to your remote branch
-#
-# Step 1: Makes a request to the V2 CircleCI API to initiate the project,
-# which will filter based upon build_and_test_frontend
-# to run the workflow/jobs listed here:
-# build-and-test:[
-# test-frontend,
-# test-e2e
-# ]
-#
-# Leverages the open source GitHub Action:
-# https://github.com/promiseofcake/circleci-trigger-action
-###########################################################################
-name: Build Only Frontend When tdrs-frontend Files Change
-on:
- push:
- paths: 'tdrs-frontend/**'
- branches-ignore:
- - develop
- - main
- - master
-jobs:
- build_and_test_frontend:
- runs-on: ubuntu-latest
- name: Build and Test Frontend
- steps:
- - uses: actions/checkout@v2
- - name: Circle CI Deployment Trigger
- id: curl-circle-ci
- uses: promiseofcake/circleci-trigger-action@v1
- with:
- user-token: ${{ secrets.CIRCLE_CI_V2_TOKEN }}
- project-slug: ${{ github.repository }}
- branch: ${{ github.ref_name }}
- payload: '{
- "build_and_test_frontend": true,
- "triggered": true
- }'
diff --git a/.gitignore b/.gitignore
index f6766031a..b627bccc4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@ compliance/opencontrols/
compliance/exports/
tdrs-backend/tdpservice/static/*
*gunicorn.log
+*.log
# don't ignore requirements.txt
!requirements.txt
@@ -115,3 +116,4 @@ cypress.env.json
# DB seeds
tdrs-backend/*.pg
+tdrs-backend/django.log
diff --git a/README.md b/README.md
index c7ed080a9..ce86a895b 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Temporary Assistance for Needy Families (TANF) Data Portal - TDP
+ # Temporary Assistance for Needy Families (TANF) Data Portal - TDP
Welcome to the project for the New TANF Data Portal, which will replace the legacy TANF Data Reporting System!
diff --git a/Taskfile.yml b/Taskfile.yml
index 9f2488455..93de45d5c 100644
--- a/Taskfile.yml
+++ b/Taskfile.yml
@@ -2,6 +2,11 @@ version: '3'
tasks:
+ gitcfg:
+ desc: Configure git
+ cmds:
+ - git config core.hooksPath .githooks
+
create-network:
desc: Create the external network
cmds:
@@ -12,10 +17,10 @@ tasks:
dir: tdrs-backend
cmds:
- task: create-network
- - docker-compose -f docker-compose.yml up -d --build
- - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py makemigrations"
- - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py migrate"
- - docker-compose -f docker-compose.yml down
+ - docker compose -f docker-compose.yml up -d --build
+ - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py makemigrations"
+ - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py migrate"
+ - docker compose -f docker-compose.yml down
- task: sentry-down
clone-sentry-repo:
@@ -43,7 +48,7 @@ tasks:
- docker cp .env sentry:/self-hosted/.env
- docker exec sentry bash -c "cd self-hosted && ./install.sh --skip-user-creation --no-report-self-hosted-issues"
# create a new user
- - docker exec sentry bash -c "cd self-hosted && docker-compose run --rm web createuser --email admin@tanf.com --password admin --superuser"
+ - docker exec sentry bash -c "cd self-hosted && docker compose run --rm web createuser --email admin@tanf.com --password admin --superuser"
# copy backup.json file to sentry
- docker cp backup.json sentry:/self-hosted/sentry/backup.json
# restore backup
@@ -58,56 +63,56 @@ tasks:
desc: Start sentry service
dir: sentry
cmds:
- - docker exec sentry bash -c "cd self-hosted && docker-compose up -d"
+ - docker exec sentry bash -c "cd self-hosted && docker compose up -d"
sentry-down:
desc: Stop sentry service
dir: sentry
cmds:
- - docker exec sentry bash -c "cd self-hosted && docker-compose down"
+ - docker exec sentry bash -c "cd self-hosted && docker compose down"
drop-db:
desc: Drop the backend database
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml down
+ - docker compose -f docker-compose.yml down
- docker volume rm tdrs-backend_postgres_data
backend-up:
desc: Start backend web server
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml up -d
+ - docker compose -f docker-compose.yml up -d
backend-down:
desc: Stop backend web server
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml down
+ - docker compose -f docker-compose.yml down
backend-logs:
desc: Show and follow backend web server logs
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml logs -f
+ - docker compose -f docker-compose.yml logs -f
backend-restart:
desc: Restart backend web server
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml restart
+ - docker compose -f docker-compose.yml restart
backend-bash:
desc: Open a shell in the backend container
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml exec web sh
+ - docker compose -f docker-compose.yml exec web sh
backend-shell:
desc: Open a Django shell in the backend container
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py shell"
+ - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py shell"
backend-exec:
desc: Execute a command in the backend container
@@ -115,7 +120,7 @@ tasks:
vars:
CMD: '{{.CMD}}'
cmds:
- - docker-compose -f docker-compose.yml exec web sh -c "python manage.py {{.CMD}}"
+ - docker compose -f docker-compose.yml exec web sh -c "python manage.py {{.CMD}}"
backend-exec-seed-db:
desc: Execute seed_db command in the backend container
@@ -123,8 +128,8 @@ tasks:
vars:
CMD: '{{.CMD}}'
cmds:
- - docker-compose -f docker-compose.yml up -d
- - docker-compose -f docker-compose.yml exec web sh -c "python manage.py populate_stts; python ./manage.py seed_db"
+ - docker compose -f docker-compose.yml up -d
+ - docker compose -f docker-compose.yml exec web sh -c "python manage.py populate_stts; python ./manage.py seed_db"
backend-pytest:
desc: 'Run pytest in the backend container E.g: task backend-pytest PYTEST_ARGS="tdpservice/test/ -s -vv"'
@@ -133,20 +138,20 @@ tasks:
PYTEST_ARGS: '{{.PYTEST_ARGS | default "."}}'
cmds:
- task backend-up
- - docker-compose -f docker-compose.yml exec web sh -c "pytest {{.PYTEST_ARGS}}"
+ - docker compose -f docker-compose.yml exec web sh -c "pytest {{.PYTEST_ARGS}}"
backend-remove-volumes:
desc: Remove the backend volumes
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml down -v
+ - docker compose -f docker-compose.yml down -v
backend-lint:
desc: Run flake8 in the backend container
dir: tdrs-backend
cmds:
- task backend-up
- - docker-compose -f docker-compose.yml exec web sh -c "flake8 . && if [ $? -eq 0 ]; then echo 'Flake8 linter found no issues'; fi"
+ - docker compose -f docker-compose.yml exec -T web sh -c "flake8 . && if [ $? -eq 0 ]; then echo 'Flake8 linter found no issues'; fi"
backend-pip-lock:
#TODO: Add a task to lock the pip dependencies
@@ -154,16 +159,16 @@ tasks:
dir: tdrs-backend
cmds:
- task: backend-up
- - docker-compose -f docker-compose.yml exec web sh -c "pipenv lock"
+ - docker compose -f docker-compose.yml exec web sh -c "pipenv lock"
psql:
desc: Open a psql shell in the backend container
dir: tdrs-backend
cmds:
- task create-network || true
- - docker-compose -f docker-compose.yml up -d postgres
+ - docker compose -f docker-compose.yml up -d postgres
- sleep 5
- - docker-compose -f docker-compose.yml exec postgres sh -c "psql -U tdpuser -d tdrs_test"
+ - docker compose -f docker-compose.yml exec postgres sh -c "psql -U tdpuser -d tdrs_test"
clean:
desc: Remove all containers, networks, and volumes
@@ -177,25 +182,25 @@ tasks:
desc: Start clamav service
dir: tdrs-backend
cmds:
- - docker-compose -f docker-compose.yml up -d clamav-rest
+ - docker compose -f docker-compose.yml up -d clamav-rest
frontend-up:
desc: Start frontend web server
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml up -d
+ - docker compose -f docker-compose.yml up -d
frontend-down:
desc: Stop frontend web server
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml down
+ - docker compose -f docker-compose.yml down
frontend-restart:
desc: Restart frontend web server
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml restart
+ - docker compose -f docker-compose.yml restart
frontend-av:
desc: Start frontend with optional clamav service
@@ -210,43 +215,51 @@ tasks:
desc: Initialize the frontend project
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml up -d --build
- - docker-compose -f docker-compose.yml exec tdp-frontend sh -c "apk add nodejs npm"
- - docker-compose -f docker-compose.yml exec tdp-frontend sh -c "npm install"
- - docker-compose -f docker-compose.yml down
+ - docker compose -f docker-compose.yml up -d --build
+ - docker compose -f docker-compose.yml exec tdp-frontend sh -c "apk add nodejs npm"
+ - docker compose -f docker-compose.yml exec tdp-frontend sh -c "npm install"
+ - docker compose -f docker-compose.yml down
frontend-test:
desc: Run frontend tests
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.local.yml up tdp-frontend-test -d
- - docker-compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test"
+ - docker compose -f docker-compose.local.yml up tdp-frontend-test -d
+ - docker compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test"
frontend-test-cov:
desc: Run frontend tests with coverage
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.local.yml up tdp-frontend-test -d
- - docker-compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test:cov"
+ - docker compose -f docker-compose.local.yml up tdp-frontend-test -d
+ - docker compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test:cov"
+
+ cypress:
+ desc: Run cypress tests
+ dir: tdrs-frontend
+ cmds:
+      - docker compose -f docker-compose.local.yml up --build tdp-frontend-test -d
+ - npm run test:e2e
+
frontend-lint:
desc: Run eslint in the frontend container
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.local.yml up -d tdp-frontend-test --quiet-pull
- - docker-compose -f docker-compose.yml exec tdp-frontend-test sh -c "npm run lint"
+ - docker compose -f docker-compose.local.yml up -d tdp-frontend-test --quiet-pull
+ - docker compose -f docker-compose.yml exec -T tdp-frontend-test sh -c "npm run lint"
frontend-logs:
desc: Show and follow frontend web server logs
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml logs -f
+ - docker compose -f docker-compose.yml logs -f
frontend-bash:
desc: Open a shell in the frontend container
dir: tdrs-frontend
cmds:
- - docker-compose -f docker-compose.yml exec tdp-frontend bash
+ - docker compose -f docker-compose.yml exec tdp-frontend bash
up:
desc: Start both frontend and backend web servers
diff --git a/docs/Technical-Documentation/tech-memos/cypress-auth.md b/docs/Technical-Documentation/tech-memos/cypress-auth.md
new file mode 100644
index 000000000..3ca504eba
--- /dev/null
+++ b/docs/Technical-Documentation/tech-memos/cypress-auth.md
@@ -0,0 +1,138 @@
+# Cypress E2E
+
+**Audience**: TDP Software Engineers
+**Subject**: Cypress Refactor
+**Date**: October 16th, 2024
+
+## Summary
+Digging into the pipeline failures associated with ticket #3141, we found that our Cypress authentication was not persisting for the admin user within a single routine that also used the STT user. Further investigation showed that our Cypress code is not easily extensible and has issues both with CSRF compliance and with scenario-specific authentication, as opposed to abstracted and compartmentalized sessions. While splitting the scenario and/or using `cy.wait()` might temporarily solve one problem, we have uncovered technical debt that requires refactoring this code.
+
+### Background
+Debugging the pipeline failures surfaced three recurring issues:
+1. Referer not found, as in [this post](https://github.com/cypress-io/cypress/issues/16975)
+2. `adminApiRequest` failed to update status, resulting in next-step failure.
+3. Errors regarding Django's `csrf_middleware_token`.
+
+By addressing authentication in a standard way and storing all session cookies and tokens, we should be able to resolve these three issues.
+
+## Out of Scope
+* Any changes to frontend ReactJS and Nginx apps
+* Significant changes to backend authentication
+* New Cypress workflows beyond our end-to-end test against deployed develop branch
+
+## Method/Design
+
+### Abstracted Gherkin Steps
+Presently, many of the JavaScript functions defined for a given Gherkin step are bespoke or single-use rather than abstracted, and should be adapted. Additionally, we found that sessions were lingering between Gherkin scenarios because we did not have generic `setup` and `teardown` implementations running ahead of them. Better use of abstraction within the scenarios, setup/teardown between scenarios, and proper session management should result in a cleaner Cypress execution and make future additions simpler.
+
+Before:
+```gherkin
+ Scenario: A new user is put in the pending state
+ Given The admin logs in
+ And 'new-cypress@teamraft.com' is in begin state
+ When 'new-cypress@teamraft.com' visits the home page
+ And 'new-cypress@teamraft.com' logs in
+ Then 'new-cypress@teamraft.com' requests access
+ And The admin sets the approval status of 'new-cypress@teamraft.com' to 'Pending'
+ Then 'new-cypress@teamraft.com' sees the request still submitted
+```
+
+There are specific functions for each of the Gherkin steps, and some rely on setup steps such as "user is in begin state", which could instead be handled by Cypress setup hooks:
+
+After:
+```gherkin
+ Scenario: A new user is put in the pending state
+ Given 'new-cypress@teamraft.com' logs in
+ And 'new-cypress@teamraft.com' requests access
+ When The admin sets the approval status of 'new-cypress@teamraft.com' to 'Pending'
+ Then 'new-cypress@teamraft.com' sees the request still submitted
+```
+
+Setup/teardown hook pseudo-code:
+```JavaScript
+describe('E2E User Approval Flow', () => {
+  beforeEach(() => {
+    cy.AdminLogsIn(kwargs)
+    cy.UserIsInBeginState(user)
+  })
+
+  afterEach(() => {
+    cy.get('@testTeardownId').then((id) => {
+      cy.resetUser(user)
+      cy.resetFilesUploaded(user)
+    })
+  })
+})
+
+When('The admin sets the approval status of {string} to {string}',
+  (username, status) => {
+    // proceed with your test steps
+  }
+)
+```
+
+- [Cypress Teardown Hook Blog Post](https://medium.com/@joydeep56053/how-to-implement-test-teardown-hook-in-cypress-671fc9667e07)
+
+### Abstracted utility authentication functions
+Our current Cypress implementation has Gherkin scenarios in `accounts.feature`, which rely on definitions in `accounts.js`, `common-steps.js`, and finally `commands.js`, each handling authentication in different ways for different scenarios (e.g., `login()`, `adminLogin()`, and `adminApiRequest()`).
+
+These functions do not handle the new Django `csrf_middleware_token`, which may be required for smooth operation. We will move to a standardized authentication function, with wrappers, that will make the Gherkin scenarios uniform in their approach to authentication and session management.
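+
+As a hedged illustration only, such a wrapper might look like the sketch below. The endpoint path, cookie name, and command name are assumptions for discussion, not existing code:
+
+```JavaScript
+// Hypothetical standardized login command; assumes the backend sets a
+// 'csrftoken' cookie on a GET and accepts it back via the X-CSRFToken header.
+Cypress.Commands.add('apiLogin', (username) => {
+  cy.session(username, () => {
+    // Prime the session so Django issues the CSRF cookie
+    cy.request('GET', '/v1/login/cypress')
+    cy.getCookie('csrftoken').then((cookie) => {
+      cy.request({
+        method: 'POST',
+        url: '/v1/login/cypress',
+        headers: { 'X-CSRFToken': cookie.value },
+        body: { username },
+      })
+    })
+  })
+})
+```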
+
+### Session Management
+These new implementations will need to leverage the newer Cypress commands `cy.session()` and `cy.intercept()` for managing our two-user scenarios.
+
+```JavaScript
+const login = (name) => {
+ cy.session(name, () => {
+ cy.request({
+ method: 'POST',
+ url: '/login',
+ body: { name, password: 's3cr3t' },
+ }).then(({ body }) => {
+ window.localStorage.setItem('authToken', body.token)
+ })
+ })
+}
+
+it('should transfer money between users', () => {
+ login('user')
+ cy.visit('/transfer')
+ cy.get('#amount').type('100.00')
+ cy.get('#send-money').click()
+
+ login('other-user')
+ cy.visit('/account_balance')
+ cy.get('#balance').should('eq', '100.00')
+})
+```
+[Session Documentation](https://docs.cypress.io/api/commands/session#Switching-sessions-inside-tests)
+
+```JavaScript
+// spying
+cy.intercept('/users/**')
+cy.intercept('GET', '/users*')
+cy.intercept({
+ method: 'GET',
+ url: '/users*',
+ hostname: 'localhost',
+})
+
+// spying and response stubbing
+cy.intercept('POST', '/users*', {
+ statusCode: 201,
+ body: {
+ name: 'Peter Pan',
+ },
+})
+
+// spying, dynamic stubbing, request modification, etc.
+cy.intercept('/users*', { hostname: 'localhost' }, (req) => {
+ /* do something with request and/or response */
+})
+```
+[Intercept Documentation](https://docs.cypress.io/api/commands/intercept)
+
+## Affected Systems
+The existing Django `CypressAuth` class, Django middleware, and the existing Nginx configuration.
+
+## Use and Test cases to consider
+Test the E2E deployment pipelines and future Cypress integration tests.
\ No newline at end of file
diff --git a/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md b/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md
new file mode 100644
index 000000000..931bceb47
--- /dev/null
+++ b/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md
@@ -0,0 +1,100 @@
+# TDP Prioritized Parser Errors
+
+**Audience**: TDP Software Engineers
+**Subject**: Prioritized Errors
+**Date**: October 20, 2024
+
+## Summary
+This technical memorandum provides a suggested path to implement a set of new requirements OFA has generated to alleviate the sheer number of parser errors generated during an STT's data submission. OFA has indicated that some errors are of lower priority for STTs to review and correct. Thus, the OFA team has requested that a "critical" designation be assigned to parser errors so that the report STTs receive is filtered down to only the critical errors that must be reviewed and fixed. Regardless of how errors are prioritized, STTs will retain the ability to see a summary of all errors detected in the error report.
+
+## Background
+Currently, error reports are generated in the TDP backend via the `get_xls_serialized_file` function, which accepts the serialized queryset of the appropriate `ParserError`s, writes an XLSX file, and returns it to the user. Apart from the lack of prioritization in the report generated from this function, it can also cause an out-of-memory (OOM) error: the Django model serializer brings the entire queryset into memory to serialize it into JSON. Because these `ParserError` querysets can be very large (hundreds of thousands of records), we will also alleviate the memory pressure `get_xls_serialized_file` introduces by removing the Django model serializer and making use of queryset pagination.
+
+## Out of Scope
+Current requirements from OFA do not require category two errors to be queryable by value and expected value. That feature is out of scope for this tech memo and would require more design and implementation work.
+
+## Method/Design
+Given the current OFA requirements, we can implement prioritized/critical errors and memory-efficient report generation without too much work. OFA has provided [this OneNote](https://gorafttech.sharepoint.com/:o:/s/TDRSResearchDesign/EnIa1Mn4v7pOskW7BLomXhIBxUMlYLRU_f1C0dxemW7dWw?e=m0rNyI) document which outlines the error types, errors, and fields that are most important/prioritized for STTs to see.
+
+### Memory Efficient Report Generation
+As previously mentioned in the [Background](#background) section, `get_xls_serialized_file` serializes parser errors into an XLSX in a way that requires the entire queryset of parser errors to be brought into memory. Because these querysets can be very large, having them in memory regularly kills Gunicorn workers with an OOM error. To remedy the issue, this tech memo suggests updating `get_xls_serialized_file` to not use Django model serializers and instead leverage the power of Django querysets and pagination. To accomplish this, instead of passing a JSON-serialized queryset to `get_xls_serialized_file`, a standard (un-evaluated) queryset should be passed. Then, the body of the `get_xls_serialized_file` function should be updated appropriately to use a queryset object instead of a JSON object to generate the XLSX spreadsheet. The updates should also paginate the queryset to avoid bringing the entirety of the queryset into memory at any one time. The code snippet below provides an example of paginating the queryset and writing the appropriate fields of each entry to the XLSX report.
+
+```python
+import calendar
+
+from django.conf import settings
+from django.core.paginator import Paginator
+
+from tdpservice.data_files.util import ParserErrorCategoryChoices
+
+paginator = Paginator(parser_errors, settings.BULK_CREATE_BATCH_SIZE)
+row_idx = 6
+for page in paginator:
+ for record in page.object_list:
+ rpt_month_year = str(getattr(record, 'rpt_month_year', None))
+ fields_json = getattr(record, 'fields_json', {})
+
+ worksheet.write(row_idx, 0, record.case_number)
+ worksheet.write(row_idx, 1, rpt_month_year[:4])
+ worksheet.write(row_idx, 2, calendar.month_name[int(rpt_month_year[4:])] if rpt_month_year[4:] else None)
+ worksheet.write(row_idx, 3, format_error_msg(record.error_message, fields_json))
+ worksheet.write(row_idx, 4, record.item_number)
+ worksheet.write(row_idx, 5, friendly_names(fields_json))
+ worksheet.write(row_idx, 6, internal_names(fields_json))
+ worksheet.write(row_idx, 7, record.row_number)
+ worksheet.write(row_idx, 8, str(ParserErrorCategoryChoices(record.error_type).label))
+```
+
+The three helper functions (`format_error_msg`, `friendly_names`, and `internal_names`) used to write the appropriate fields can be seen below.
+
+```python
+def format_error_msg(error_msg, fields_json):
+ """Format error message."""
+ for key, value in fields_json['friendly_name'].items():
+ error_msg = error_msg.replace(key, value) if value else error_msg
+ return error_msg
+
+
+def friendly_names(fields_json):
+ """Return comma separated string of friendly names."""
+ return ','.join([i for i in fields_json['friendly_name'].values()])
+
+
+def internal_names(fields_json):
+ """Return comma separated string of internal names."""
+ return ','.join([i for i in fields_json['friendly_name'].keys()])
+```
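+
+For reference, these helpers assume `fields_json` carries a `friendly_name` object mapping internal field names to friendly names, along the lines of this illustrative (not actual) value:
+
+```python
+fields_json = {
+    "friendly_name": {
+        # internal name -> friendly name
+        "FAMILY_AFFILIATION": "Family Affiliation",
+        "CITIZENSHIP_STATUS": "Citizenship/Immigration Status",
+    }
+}
+```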
+
+### Prioritized/Critical Errors
+[This OneNote](https://gorafttech.sharepoint.com/:o:/s/TDRSResearchDesign/EnIa1Mn4v7pOskW7BLomXhIBxUMlYLRU_f1C0dxemW7dWw?e=m0rNyI) is invaluable to the implementation of prioritized errors. Prioritizing errors could be a very large and technically challenging feature involving new migrations, validation/validator refactors, etc. However, all of that can be avoided through a key insight into the category two and category three validators, by way of OFA's requirements for them. For the category two case, the OneNote document generically specifies category two validation surrounding Family Affiliation, Citizenship Status, and Closure Reason. Further discussion with OFA indicated that it is important/a priority for an STT to see all category two errors encompassing these fields in their entirety. That makes prioritizing these category two errors straightforward, because there is no need to query those fields by specific values and expected values. The queries below provide a complete implementation to query all category two errors encompassing those fields.
+
+```python
+# All cat2 errors associated with FAMILY_AFFILIATION and (CITIZENSHIP_STATUS or CLOSURE_REASON)
+second_field = "CITIZENSHIP_STATUS" if is_active else "CLOSURE_REASON"
+field_query = Q(field_name="FAMILY_AFFILIATION") | Q(field_name=second_field)
+filtered_errors = filtered_errors.union(all_errors.filter(
+ field_query,
+ error_type=ParserErrorCategoryChoices.FIELD_VALUE
+ ))
+```
+
+The key insight for the category three case is less obvious. Looking at the OneNote, it seems as though we might need to query errors based on field name(s), expected value, and actual value. However, for category three errors that information is encoded into the error by its existence. For example, the OneNote indicates that a high-priority error an STT should have included in their report is `If fam affil = 1 then SSN must be valid`. This exact error and its values (expected and given) can be uniquely found in any of the active or closed case record schemas. E.g.:
+
+```python
+category3.ifThenAlso(
+ condition_field_name='FAMILY_AFFILIATION',
+ condition_function=category3.isEqual(1),
+ result_field_name='SSN',
+ result_function=category3.validateSSN(),
+)
+```
+
+The existence of this error, with these fields, is uniquely defined in the appropriate schemas. The same can be said for the remaining critical category three errors. Thus, to define the high-priority errors we need only know the required field(s) and their error type. Given those pieces of information, queries of the form below can be used to filter STT error reports to only show the highest-priority errors.
+
+```python
+errors.filter(fields_json__friendly_name__has_keys=[FIELD_NAME, FIELD_NAME, ETC...],
+ error_type=ParserErrorCategoryChoices.VALUE_CONSISTENCY)
+```
+
+By unioning the category two queries from above with the remainder of the category three queries, a queryset containing only the critical errors can be generated and subsequently passed to `get_xls_serialized_file` to generate and return the prioritized error report to the requesting STT.
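+
+A minimal sketch of that union, assuming a helper such as `get_prioritized_queryset` (the helper name and the exact category three field list are assumptions, not existing code):
+
+```python
+from django.db.models import Q
+
+# Illustrative subset of the prioritized category three field combinations
+PRIORITIZED_CAT3_FIELDS = [
+    ["FAMILY_AFFILIATION", "SSN"],
+    ["FAMILY_AFFILIATION", "CITIZENSHIP_STATUS"],
+]
+
+def get_prioritized_queryset(all_errors, is_active):
+    """Union the critical category two and category three errors."""
+    # Category two: FAMILY_AFFILIATION plus CITIZENSHIP_STATUS or CLOSURE_REASON
+    second_field = "CITIZENSHIP_STATUS" if is_active else "CLOSURE_REASON"
+    field_query = Q(field_name="FAMILY_AFFILIATION") | Q(field_name=second_field)
+    filtered = all_errors.filter(
+        field_query, error_type=ParserErrorCategoryChoices.FIELD_VALUE
+    )
+    # Category three: each critical error is identified by its field combination
+    for fields in PRIORITIZED_CAT3_FIELDS:
+        filtered = filtered.union(all_errors.filter(
+            fields_json__friendly_name__has_keys=fields,
+            error_type=ParserErrorCategoryChoices.VALUE_CONSISTENCY,
+        ))
+    return filtered
+```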
+
+## Affected Systems
+- TDP backend
+- TDP frontend: latency time incurred while generating report
+
+## Use and Test cases to consider
+- Admin and STT receive the same report
+- Existing tests leveraging ParserError querysets are updated and re-validated for correctness
diff --git a/scripts/git-secrets-check.sh b/scripts/git-secrets-check.sh
index f371f303e..dcfcd7821 100755
--- a/scripts/git-secrets-check.sh
+++ b/scripts/git-secrets-check.sh
@@ -1,29 +1,57 @@
#!/bin/bash
set -e
+islocal=$1
-if [ -d /tmp/git-secrets ]; then
+if [[ $(uname -s) == "Darwin" ]]; then # Mac OSX check
+ gs_path="/usr/local/bin"
+else # Linux, we're likely running in CircleCI
+ gs_path="/usr/sbin"
+fi
+
+if [ -f "$gs_path/git-secrets" ]; then
echo The command git-secrets is available
else
echo The command git-secrets is not available, cloning...
git clone git@github.com:awslabs/git-secrets.git /tmp/git-secrets/
if [ -f /tmp/git-secrets/git-secrets ]; then
- echo "Moving git secrets into PATH"
- sudo cp /tmp/git-secrets/git-secrets /usr/sbin/
+
+ echo "Moving git secrets into PATH"
+ sudo cp /tmp/git-secrets/git-secrets $gs_path/
+ $gs_path/git-secrets --install -f
+ rm -rf /tmp/git-secrets #cleanup of clone dir
else
- echo "Git clone failed for git-secrets"
+ echo "Git clone failed for git-secrets"
fi
fi
# ensure we have correct configs in place
-[ -f ../.gitconfig ]
-cat .gitconfig >> .git/config
-echo "Git-Secrets Config loaded:"
-grep -A10 secrets .git/config
-# grep will return non-zero code if nothing found, failing the build
+if [ -f .gitconfig ]; then
+ cat .gitconfig >> .git/config
+ echo "Git-Secrets Config loaded:"
+ grep -A10 secrets .git/config
+ # grep will return non-zero code if nothing found, failing the build
+fi
-echo "git-secrets-check.sh: Scanning repo ..."
-git secrets --scan -r ../
-retVal=$?
+if [ -n "$islocal" ]; then
+    echo "git-secrets-check.sh: Scanning files staged for commit ..."
+    setopt shwordsplit # zsh option; this branch runs via the zsh pre-commit hook
+    staged_files=$(git diff --cached --name-status | grep -vE "^(D|R[0-9]+)" | cut -f2 | xargs)
+
+    for filename in $staged_files; do
+        echo "git-secrets-check.sh: Scanning $filename ..."
+        git secrets --scan "$filename"
+        retVal=$?
+        if [[ $retVal -ne 0 ]]; then
+            echo "git-secrets found issues, prevented commit."
+            return 1
+        fi
+    done
+
+else
+ echo "git-secrets-check.sh: Scanning repo ..."
+ git secrets --scan -r ../
+ retVal=$?
+fi
# if there are issues, they will be listed then script will abort here
if [[ $retVal -eq 0 ]]; then
@@ -32,4 +60,3 @@ else
echo "git-secrets-check.sh: Issues found with return code $retVal, please remediate."
return 1
fi
-
diff --git a/tdrs-backend/Dockerfile b/tdrs-backend/Dockerfile
index 34ef5dd9b..6b908eee6 100644
--- a/tdrs-backend/Dockerfile
+++ b/tdrs-backend/Dockerfile
@@ -9,7 +9,7 @@ ENV DJANGO_SETTINGS_MODULE=tdpservice.settings.local
ENV DJANGO_CONFIGURATION=Local
# Allows docker to cache installed dependencies between builds
COPY Pipfile Pipfile.lock /tdpapp/
-COPY sources.list /etc/apt/sources.list
+# COPY sources.list /etc/apt/sources.list
WORKDIR /tdpapp/
# Download latest listing of available packages:
RUN apt-get -y update
diff --git a/tdrs-backend/plg/README.md b/tdrs-backend/plg/README.md
index f0438e8f4..900a14b93 100644
--- a/tdrs-backend/plg/README.md
+++ b/tdrs-backend/plg/README.md
@@ -1,3 +1,31 @@
+# TDP PLG Stack
+Before attempting to deploy the PLG stack or a postgres exporter you MUST have access to the production space in cloud.gov.
+
+## Deploying PLG
+Before deploying the PLG stack you must have the `ADMIN_EMAILS` and `DEV_EMAILS` variables defined in your shell environment. Each variable should be a comma-separated string of emails, e.g. `ADMIN_EMAILS="email1@email.com, email2@email.com, email3@email.com"` and `DEV_EMAILS="email4@email.com, email5@email.com, email6@email.com"`.
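+
+For example, in your shell (illustrative addresses only):
+
+```
+export ADMIN_EMAILS="email1@email.com, email2@email.com, email3@email.com"
+export DEV_EMAILS="email4@email.com, email5@email.com, email6@email.com"
+```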
+
+Once both of the above items have been confirmed, you can target the production environment with the CF CLI and run the command below.
+
+```
+./deploy.sh -a -d tdp-db-prod
+```
+
+The command will deploy the entire PLG stack to the production environment and set up all appropriate network policies and routes.
+
+## Deploying a Postgres Exporter
+Before deploying a postgres exporter, you need to acquire the AWS RDS database URI for the RDS instance in the environment you are deploying the exporter to.
+
+```
+cf env
+```
diff --git a/tdrs-backend/tdpservice/parsers/models.py b/tdrs-backend/tdpservice/parsers/models.py
index f9c5f3c63..f1e470e6e 100644
--- a/tdrs-backend/tdpservice/parsers/models.py
+++ b/tdrs-backend/tdpservice/parsers/models.py
@@ -2,24 +2,15 @@
 import datetime

 from django.db import models
-from django.utils.translation import gettext_lazy as _
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
 from tdpservice.data_files.models import DataFile
+from tdpservice.data_files.util import ParserErrorCategoryChoices
+
 import logging

 logger = logging.getLogger(__name__)

-class ParserErrorCategoryChoices(models.TextChoices):
-    """Enum of ParserError error_type."""
-
-    PRE_CHECK = "1", _("File pre-check")
-    FIELD_VALUE = "2", _("Record value invalid")
-    VALUE_CONSISTENCY = "3", _("Record value consistency")
-    CASE_CONSISTENCY = "4", _("Case consistency")
-    SECTION_CONSISTENCY = "5", _("Section consistency")
-    HISTORICAL_CONSISTENCY = "6", _("Historical consistency")
-
 class ParserError(models.Model):
     """Model representing a parser error."""
@@ -139,7 +130,7 @@ def get_status(self):
             return DataFileSummary.Status.REJECTED
         elif errors.count() == 0:
             return DataFileSummary.Status.ACCEPTED
-        elif row_precheck_errors.count() > 0 or case_consistency_errors.count() > 0:
+        elif (row_precheck_errors.count() > 0 or case_consistency_errors.count()):
             return DataFileSummary.Status.PARTIALLY_ACCEPTED
         else:
             return DataFileSummary.Status.ACCEPTED_WITH_ERRORS
diff --git a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py
index 82d5c2c46..29b6cb564 100644
--- a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py
+++ b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py
@@ -94,7 +94,7 @@
             condition_field_name='FAMILY_AFFILIATION',
             condition_function=category3.isEqual(1),
             result_field_name='CITIZENSHIP_STATUS',
-            result_function=category3.isOneOf((1, 2)),
+            result_function=category3.isOneOf((1, 2, 3)),
         ),
         category3.ifThenAlso(
             condition_field_name='FAMILY_AFFILIATION',
@@ -317,7 +317,7 @@
             startIndex=48,
             endIndex=49,
             required=False,
-            validators=[category2.isGreaterThan(0)]
+            validators=[category2.isGreaterThan(0, inclusive=True)]
         ),
         Field(
             item="32E",
diff --git a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m3.py b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m3.py
index 6f44c551e..64285ba6e 100644
--- a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m3.py
+++ b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m3.py
@@ -92,7 +92,7 @@
             condition_field_name='FAMILY_AFFILIATION',
             condition_function=category3.isEqual(1),
             result_field_name='CITIZENSHIP_STATUS',
-            result_function=category3.isOneOf((1, 2)),
+            result_function=category3.isOneOf((1, 2, 3)),
         ),
         category3.ifThenAlso(
             condition_field_name='FAMILY_AFFILIATION',
@@ -409,7 +409,7 @@
             condition_field_name='FAMILY_AFFILIATION',
             condition_function=category3.isEqual(1),
             result_field_name='CITIZENSHIP_STATUS',
-            result_function=category3.isOneOf((1, 2)),
+            result_function=category3.isOneOf((1, 2, 3)),
         ),
         category3.ifThenAlso(
             condition_field_name='FAMILY_AFFILIATION',
diff --git a/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py b/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py
index 8f9aba575..9dc92acd1 100644
--- a/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py
+++ b/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py
@@ -66,12 +66,6 @@
             result_field_name="WORK_REQ_SANCTION",
             result_function=category3.isOneOf((1, 2)),
         ),
-        category3.ifThenAlso(
-            condition_field_name="SANC_REDUCTION_AMT",
-            condition_function=category3.isGreaterThan(0),
-            result_field_name="FAMILY_SANC_ADULT",
-            result_function=category3.isOneOf((1, 2)),
-        ),
         category3.ifThenAlso(
             condition_field_name="SANC_REDUCTION_AMT",
             condition_function=category3.isGreaterThan(0),
@@ -635,7 +629,7 @@
             endIndex=114,
             required=False,
             validators=[
-                category2.isOneOf(["9", " "]),
+                category2.isOneOf(["9", "0", " "]),
                 category2.isAlphaNumeric(),
             ],
         ),
@@ -658,7 +652,7 @@
             endIndex=117,
             required=False,
             validators=[
-                category2.isOneOf([1, 2]),
+                category2.isOneOf([0, 1, 2]),
             ],
         ),
         Field(
diff --git a/tdrs-backend/tdpservice/parsers/test/test_parse.py b/tdrs-backend/tdpservice/parsers/test/test_parse.py
index d01a44030..1e9cd3840 100644
--- a/tdrs-backend/tdpservice/parsers/test/test_parse.py
+++ b/tdrs-backend/tdpservice/parsers/test/test_parse.py
@@ -498,7 +498,7 @@ def test_parse_ssp_section1_datafile(ssp_section1_datafile, dfs):
     assert cat4_errors[1].error_message == "Duplicate record detected with record type M3 at line 3273. " + \
         "Record is a duplicate of the record at line number 3272."

-    assert parser_errors.count() == 32488
+    assert parser_errors.count() == 32455
     assert SSP_M1.objects.count() == expected_m1_record_count
     assert SSP_M2.objects.count() == expected_m2_record_count
diff --git a/tdrs-backend/tdpservice/stts/models.py b/tdrs-backend/tdpservice/stts/models.py
index b883ded74..b960d0e55 100644
--- a/tdrs-backend/tdpservice/stts/models.py
+++ b/tdrs-backend/tdpservice/stts/models.py
@@ -4,6 +4,9 @@
 from django.db.models import constraints

+DEFAULT_NUMBER_OF_SECTIONS = 4
+
+
 class Region(models.Model):
     """A model representing a US region."""

@@ -39,6 +42,14 @@ class EntityType(models.TextChoices):
     ssp = models.BooleanField(default=False, null=True)
     sample = models.BooleanField(default=False, null=True)

+    @property
+    def num_sections(self):
+        """The number of sections this STT submits."""
+        if self.filenames is None:
+            return DEFAULT_NUMBER_OF_SECTIONS
+        divisor = int(self.ssp) + 1
+        return len(self.filenames) // divisor
+
     class Meta:
         """Metadata."""
diff --git a/tdrs-backend/tdpservice/stts/serializers.py b/tdrs-backend/tdpservice/stts/serializers.py
index be2ec88b6..7774e87ab 100644
--- a/tdrs-backend/tdpservice/stts/serializers.py
+++ b/tdrs-backend/tdpservice/stts/serializers.py
@@ -14,7 +14,7 @@ class Meta:
         """Metadata."""

         model = STT
-        fields = ["id", "type", "postal_code", "name", "region", "filenames", "stt_code", "ssp",]
+        fields = ["id", "type", "postal_code", "name", "region", "filenames", "stt_code", "ssp", "num_sections"]

     def get_postal_code(self, obj):
         """Return the state postal_code."""
diff --git a/tdrs-backend/tdpservice/users/apps.py b/tdrs-backend/tdpservice/users/apps.py
index 5cb2627fd..48edb5b6d 100644
--- a/tdrs-backend/tdpservice/users/apps.py
+++ b/tdrs-backend/tdpservice/users/apps.py
@@ -8,3 +8,7 @@ class UsersConfig(AppConfig):

     name = "tdpservice.users"
     verbose_name = "Users"
+
+    def ready(self):
+        """Import signals."""
+        import tdpservice.users.signals # noqa
diff --git a/tdrs-backend/tdpservice/users/models.py b/tdrs-backend/tdpservice/users/models.py
index 40f8dc900..3cf094264 100644
--- a/tdrs-backend/tdpservice/users/models.py
+++ b/tdrs-backend/tdpservice/users/models.py
@@ -118,9 +118,11 @@ def __str__(self):
         """Return the username as the string representation of the object."""
         return self.username

-    def is_in_group(self, group_name: str) -> bool:
-        """Return whether or not the user is a member of the specified Group."""
-        return self.groups.filter(name=group_name).exists()
+    def is_in_group(self, group_names: list) -> bool:
+        """Return whether or not the user is a member of the specified Group(s)."""
+        if type(group_names) == str:
+            group_names = [group_names]
+        return self.groups.filter(name__in=group_names).exists()

     def validate_location(self):
         """Throw a validation error if a user has a location type incompatable with their role."""
@@ -180,6 +182,11 @@ def is_ocio_staff(self) -> bool:
         """Return whether or not the user is in the ACF OCIO Group."""
         return self.is_in_group("ACF OCIO")

+    @property
+    def is_an_admin(self) -> bool:
+        """Return whether or not the user is in the OFA Admin Group or OFA System Admin."""
+        return self.is_in_group(["OFA Admin", "OFA System Admin"])
+
     @property
     def is_ofa_sys_admin(self) -> bool:
         """Return whether or not the user is in the OFA System Admin Group."""
diff --git a/tdrs-backend/tdpservice/users/signals.py b/tdrs-backend/tdpservice/users/signals.py
new file mode 100644
index 000000000..e22ab4561
--- /dev/null
+++ b/tdrs-backend/tdpservice/users/signals.py
@@ -0,0 +1,62 @@
+"""Signals for the users app."""
+from django.db.models.signals import m2m_changed, pre_save, post_save
+from django.dispatch import receiver
+from tdpservice.users.models import User
+from django.contrib.auth.models import Group
+from tdpservice.email.helpers.admin_notifications import email_system_owner_system_admin_role_change
+
+import logging
+logger = logging.getLogger()
+
+@receiver(m2m_changed, sender=User.groups.through)
+def user_group_changed(sender, instance, action, pk_set, **kwargs):
+    """Send an email to the System Owner when a user is assigned or removed from the System Admin role."""
+    ACTIONS = {
+        'PRE_REMOVE': 'pre_remove',
+        'PRE_ADD': 'pre_add',
+        'PRE_CLEAR': 'pre_clear'
+    }
+    if pk_set:
+        ADMIN_GROUP_PK = Group.objects.get(name="OFA System Admin").pk
+        group_change_list = [pk for pk in pk_set]
+        if ADMIN_GROUP_PK in group_change_list and action == ACTIONS['PRE_ADD']:
+            # EMAIL ADMIN GROUP ADDED to OFA ADMIN
+            email_system_owner_system_admin_role_change(instance, "added")
+        elif ADMIN_GROUP_PK in group_change_list and action == ACTIONS['PRE_REMOVE']:
+            # EMAIL ADMIN GROUP REMOVED from OFA ADMIN
+            email_system_owner_system_admin_role_change(instance, "removed")
+    elif pk_set is None and action == ACTIONS['PRE_CLEAR']:
+        # EMAIL ADMIN GROUP REMOVED from OFA ADMIN
+        email_system_owner_system_admin_role_change(instance, "removed")
+
+@receiver(pre_save, sender=User)
+def user_is_staff_superuser_changed(sender, instance, **kwargs):
+    """Send an email to the System Owner when a user is assigned or removed from the System Admin role."""
+    # first get instance from db for existing state
+    try:
+        current_user_state = User.objects.get(pk=instance.pk)
+    except User.DoesNotExist:
+        return
+
+    # check if is_staff is assigned
+    if instance.is_staff and not current_user_state.is_staff:
+        email_system_owner_system_admin_role_change(instance, "is_staff_assigned")
+    # check if is_staff is removed
+    elif not instance.is_staff and current_user_state.is_staff:
+        email_system_owner_system_admin_role_change(instance, "is_staff_removed")
+    # check if is_superuser is assigned
+    if instance.is_superuser and not current_user_state.is_superuser:
+        email_system_owner_system_admin_role_change(instance, "is_superuser_assigned")
+    # check if is_superuser is removed
+    elif not instance.is_superuser and current_user_state.is_superuser:
+        email_system_owner_system_admin_role_change(instance, "is_superuser_removed")
+
+
+@receiver(post_save, sender=User)
+def user_is_staff_superuser_created(sender, instance, created, **kwargs):
+    """Send an email to the System Owner when a user is assigned or removed from the System Admin role."""
+    if created:
+        if instance.is_staff:
+            email_system_owner_system_admin_role_change(instance, "is_staff_assigned")
+        if instance.is_superuser:
+            email_system_owner_system_admin_role_change(instance, "is_superuser_assigned")
diff --git a/tdrs-backend/tdpservice/users/test/test_signals.py b/tdrs-backend/tdpservice/users/test/test_signals.py
new file mode 100644
index 000000000..218e71113
--- /dev/null
+++ b/tdrs-backend/tdpservice/users/test/test_signals.py
@@ -0,0 +1,41 @@
+"""Test signals."""
+import pytest
+from unittest.mock import patch, call
+from tdpservice.users.models import User
+from tdpservice.users.test.factories import AdminUserFactory
+from django.contrib.auth.models import Group
+import logging
+import django
+
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.django_db
+def test_my_signal_receiver(mocker):
+    """Test my_signal_receiver."""
+    with patch("django.db.models.signals.m2m_changed.send") as mock_receiver:
+        instance = AdminUserFactory.create()
+        instance.groups.add(Group.objects.get(name="OFA System Admin"))
+
+        mock_receiver.assert_called_with(
+            sender=User.groups.through,
+            instance=instance,
+            action="post_add",
+            pk_set={Group.objects.get(name="OFA System Admin").pk},
+            reverse=False,
+            using="default",
+            model=django.contrib.auth.models.Group,
+        )
+        mock_receiver.call_count = 2 # pre_save and post_save
+
+    with patch(
+        "tdpservice.users.signals.email_system_owner_system_admin_role_change"
+    ) as mock_email_system_owner_system_admin_role_change:
+        instance = AdminUserFactory.create()
+        instance.groups.add(Group.objects.get(name="OFA System Admin"))
+        mock_email_system_owner_system_admin_role_change.assert_has_calls([
+            call(instance, 'is_staff_assigned'),
+            call(instance, 'is_superuser_assigned'),
+            call(instance, "added")
+        ])
diff --git a/tdrs-frontend/nginx/cloud.gov/locations.conf b/tdrs-frontend/nginx/cloud.gov/locations.conf
index 2e14fc69f..85f681543 100644
--- a/tdrs-frontend/nginx/cloud.gov/locations.conf
+++ b/tdrs-frontend/nginx/cloud.gov/locations.conf
@@ -78,6 +78,24 @@ location /grafana/ {
     proxy_buffer_size 4k;
 }

+location /alerts/ {
+    auth_request /plg_auth_check;
+    auth_request_set $auth_status $upstream_status;
+
+    set $alerts http://alertmanager.apps.internal:8080$request_uri;
+    proxy_pass $alerts;
+    proxy_set_header Host $host:3000;
+    proxy_set_header X-Real-IP $remote_addr;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header X-Forwarded-Proto https;
+
+    proxy_connect_timeout 300;
+    proxy_read_timeout 300;
+    proxy_send_timeout 300;
+    send_timeout 900;
+    proxy_buffer_size 4k;
+}
+
 location = /plg_auth_check {
     internal;
     set $endpoint http://{{env "BACKEND_HOST"}}.apps.internal:8080/plg_auth_check/;
diff --git a/tdrs-frontend/src/actions/reports.js b/tdrs-frontend/src/actions/reports.js
index 8ecb8839e..766aafc7f 100644
--- a/tdrs-frontend/src/actions/reports.js
+++ b/tdrs-frontend/src/actions/reports.js
@@ -4,6 +4,7 @@ import axios from 'axios'
 import axiosInstance from '../axios-instance'
 import { logErrorToServer } from '../utils/eventLogger'
 import removeFileInputErrorState from '../utils/removeFileInputErrorState'
+import { fileUploadSections } from '../reducers/reports'

 const BACKEND_URL = process.env.REACT_APP_BACKEND_URL
diff --git a/tdrs-frontend/src/actions/reports.test.js b/tdrs-frontend/src/actions/reports.test.js
index 40593f3bb..294e31c9a 100644
--- a/tdrs-frontend/src/actions/reports.test.js
+++ b/tdrs-frontend/src/actions/reports.test.js
@@ -241,6 +241,18 @@ describe('actions/reports', () => {
     })
   })

+  it('should dispatch SET_SELECTED_STT with empty stt', async () => {
+    const store = mockStore()
+
+    await store.dispatch(setStt(''))
+
+    const actions = store.getActions()
+    expect(actions[0].type).toBe(SET_SELECTED_STT)
+    expect(actions[0].payload).toStrictEqual({
+      stt: '',
+    })
+  })
+
   it('should dispatch SET_SELECTED_QUARTER', async () => {
     const store = mockStore()
diff --git a/tdrs-frontend/src/assets/Reports.scss b/tdrs-frontend/src/assets/Reports.scss
index 58b89ce6c..946e9ece6 100644
--- a/tdrs-frontend/src/assets/Reports.scss
+++ b/tdrs-frontend/src/assets/Reports.scss
@@ -42,6 +42,20 @@
   cursor: pointer;
 }

+.reprocessed {
+  background-color: transparent;
+  border: none;
+  color: #264A64;
+  text-align: left;
+  text-decoration: underline;
+  margin: 0;
+  padding: 0;
+}
+
+.reprocessed:hover {
+  cursor: pointer;
+}
+
 .usa-table caption {
   width: 100%;
 }
\ No newline at end of file
diff --git a/tdrs-frontend/src/components/Footer/Footer.jsx b/tdrs-frontend/src/components/Footer/Footer.jsx
index 7b0eb0539..2b6e607dc 100644
--- a/tdrs-frontend/src/components/Footer/Footer.jsx
+++ b/tdrs-frontend/src/components/Footer/Footer.jsx
@@ -34,8 +34,6 @@ function Footer() {
         ) : null}
-
-