diff --git a/.gitconfig b/.gitconfig
index f70bcd581..b3cc6696c 100644
--- a/.gitconfig
+++ b/.gitconfig
@@ -1,17 +1,20 @@
 [secrets]
 	providers = git secrets --aws-provider
-	patterns = (A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}
-	patterns = (\"|')?(AWS|aws|Aws)?_?(SECRET|secret|Secret)?_?(ACCESS|access|Access)?_?(KEY|key|Key)(\"|')?\\s*(:|=>|=)\\s*(\"|')?[A-Za-z0-9/\\+=]{40}(\"|')?
-	patterns = (\"|')?(AWS|aws|Aws)?_?(ACCOUNT|account|Account)_?(ID|id|Id)?(\"|')?\\s*(:|=>|=)\\s*(\"|')?[0-9]{4}\\-?[0-9]{4}\\-?[0-9]{4}(\"|')?
-	patterns = .+_KEY=.+
 	allowed = [A-Z]+_KEY=..echo \".{S3_CREDENTIALS}\" [|] jq -r .+
 	allowed = ./tdrs-backend/.env.example:.*
 	allowed = ./tdrs-backend/docker-compose.yml:57:.*
-	allowed = ./tdrs-backend/manifest.proxy.yml:*
+
+	allowed = ./tdrs-frontend/node_modules*
 	allowed = regexes.json:.*
 	allowed = ./scripts/copy-login-gov-keypair.sh:14:JWT_KEY=.*
 	allowed = scripts/deploy-backend.sh:.+:DJANGO_SECRET_KEY=..python -c .from secrets import token_urlsafe. print.token_urlsafe..*
 	allowed = .git/config:.*
 	allowed = .gitconfig:.*
-	allowed = .*DJANGO_SECRET_KEY=.*
+	allowed = .*DJANGO_SECRET_KEY=.* #this is auto-generated in deployed environments
+	allowed = ./tdrs-backend/manifest.proxy.yml:*
 	allowed = ./tdrs-backend/plg/loki/manifest.yml:*
+	patterns = (A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}
+	patterns = (\"|')?(AWS|aws|Aws)?_?(SECRET|secret|Secret)?_?(ACCESS|access|Access)?_?(KEY|key|Key)(\"|')?\\s*(:|=>|=)\\s*(\"|')?[A-Za-z0-9/\\+=]{40}(\"|')?
+	patterns = (\"|')?(AWS|aws|Aws)?_?(ACCOUNT|account|Account)_?(ID|id|Id)?(\"|')?\\s*(:|=>|=)\\s*(\"|')?[0-9]{4}\\-?[0-9]{4}\\-?[0-9]{4}(\"|')?
+	patterns = .+_KEY=.+
+	patterns = .+smtp_auth_password: .[^{]+
diff --git a/.githooks/pre-commit b/.githooks/pre-commit
new file mode 100755
index 000000000..7da1e7bb0
--- /dev/null
+++ b/.githooks/pre-commit
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -e
+
+zsh ./scripts/git-secrets-check.sh local
diff --git a/.githooks/pre-push b/.githooks/pre-push
new file mode 100755
index 000000000..51e4e28ff
--- /dev/null
+++ b/.githooks/pre-push
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+
+if ! task frontend-lint 2>/dev/null; then
+  echo "Frontend lint failed"
+  exit 1
+fi
+
+if ! task backend-lint 2>/dev/null; then
+  echo "Backend lint failed"
+  exit 1
+fi
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index f6766031a..7d693b2c7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -115,3 +115,6 @@ cypress.env.json
 
 # DB seeds
 tdrs-backend/*.pg
+
+# Log files
+*.log
diff --git a/README.md b/README.md
index c7ed080a9..ce86a895b 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Temporary Assistance for Needy Families (TANF) Data Portal - TDP
+ # Temporary Assistance for Needy Families (TANF) Data Portal - TDP
 
 Welcome to the project for the New TANF Data Portal, which will replace the legacy TANF Data Reporting System!
diff --git a/Taskfile.yml b/Taskfile.yml index 9f2488455..61bc997a2 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -2,6 +2,11 @@ version: '3' tasks: + gitcfg: + desc: Configure git + cmds: + - git config core.hooksPath .githooks + create-network: desc: Create the external network cmds: @@ -12,10 +17,10 @@ tasks: dir: tdrs-backend cmds: - task: create-network - - docker-compose -f docker-compose.yml up -d --build - - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py makemigrations" - - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py migrate" - - docker-compose -f docker-compose.yml down + - docker compose -f docker-compose.yml up -d --build + - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py makemigrations" + - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py migrate" + - docker compose -f docker-compose.yml down - task: sentry-down clone-sentry-repo: @@ -43,7 +48,7 @@ tasks: - docker cp .env sentry:/self-hosted/.env - docker exec sentry bash -c "cd self-hosted && ./install.sh --skip-user-creation --no-report-self-hosted-issues" # create a new user - - docker exec sentry bash -c "cd self-hosted && docker-compose run --rm web createuser --email admin@tanf.com --password admin --superuser" + - docker exec sentry bash -c "cd self-hosted && docker compose run --rm web createuser --email admin@tanf.com --password admin --superuser" # copy backup.json file to sentry - docker cp backup.json sentry:/self-hosted/sentry/backup.json # restore backup @@ -58,56 +63,56 @@ tasks: desc: Start sentry service dir: sentry cmds: - - docker exec sentry bash -c "cd self-hosted && docker-compose up -d" + - docker exec sentry bash -c "cd self-hosted && docker compose up -d" sentry-down: desc: Stop sentry service dir: sentry cmds: - - docker exec sentry bash -c "cd self-hosted && docker-compose down" + - docker exec sentry bash -c "cd self-hosted && docker compose down" drop-db: desc: Drop the backend database dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml down + - docker compose -f docker-compose.yml down - docker volume rm tdrs-backend_postgres_data backend-up: desc: Start backend web server dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml up -d + - docker compose -f docker-compose.yml up -d backend-down: desc: Stop backend web server dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml down + - docker compose -f docker-compose.yml down backend-logs: desc: Show and follow backend web server logs dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml logs -f + - docker compose -f docker-compose.yml logs -f backend-restart: desc: Restart backend web server dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml restart + - docker compose -f docker-compose.yml restart backend-bash: desc: Open a shell in the backend container dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml exec web sh + - docker compose -f docker-compose.yml exec web sh backend-shell: desc: Open a Django shell in the backend container dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml exec web sh -c "python ./manage.py shell" + - docker compose -f docker-compose.yml exec web sh -c "python ./manage.py shell" backend-exec: desc: Execute a command in the backend container @@ -115,7 +120,7 @@ tasks: vars: CMD: '{{.CMD}}' cmds: - - docker-compose -f docker-compose.yml exec web sh -c "python manage.py {{.CMD}}" + - docker compose -f docker-compose.yml exec web sh -c 
"python manage.py {{.CMD}}" backend-exec-seed-db: desc: Execute seed_db command in the backend container @@ -123,8 +128,8 @@ tasks: vars: CMD: '{{.CMD}}' cmds: - - docker-compose -f docker-compose.yml up -d - - docker-compose -f docker-compose.yml exec web sh -c "python manage.py populate_stts; python ./manage.py seed_db" + - docker compose -f docker-compose.yml up -d + - docker compose -f docker-compose.yml exec web sh -c "python manage.py populate_stts; python ./manage.py seed_db" backend-pytest: desc: 'Run pytest in the backend container E.g: task backend-pytest PYTEST_ARGS="tdpservice/test/ -s -vv"' @@ -133,20 +138,20 @@ tasks: PYTEST_ARGS: '{{.PYTEST_ARGS | default "."}}' cmds: - task backend-up - - docker-compose -f docker-compose.yml exec web sh -c "pytest {{.PYTEST_ARGS}}" + - docker compose -f docker-compose.yml exec web sh -c "pytest {{.PYTEST_ARGS}}" backend-remove-volumes: desc: Remove the backend volumes dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml down -v + - docker compose -f docker-compose.yml down -v backend-lint: desc: Run flake8 in the backend container dir: tdrs-backend cmds: - task backend-up - - docker-compose -f docker-compose.yml exec web sh -c "flake8 . && if [ $? -eq 0 ]; then echo 'Flake8 linter found no issues'; fi" + - docker compose -f docker-compose.yml exec -T web sh -c "flake8 . && if [ $? -eq 0 ]; then echo 'Flake8 linter found no issues'; fi" backend-pip-lock: #TODO: Add a task to lock the pip dependencies @@ -154,16 +159,16 @@ tasks: dir: tdrs-backend cmds: - task: backend-up - - docker-compose -f docker-compose.yml exec web sh -c "pipenv lock" + - docker compose -f docker-compose.yml exec web sh -c "pipenv lock" psql: desc: Open a psql shell in the backend container dir: tdrs-backend cmds: - task create-network || true - - docker-compose -f docker-compose.yml up -d postgres + - docker compose -f docker-compose.yml up -d postgres - sleep 5 - - docker-compose -f docker-compose.yml exec postgres sh -c "psql -U tdpuser -d tdrs_test" + - docker compose -f docker-compose.yml exec postgres sh -c "psql -U tdpuser -d tdrs_test" clean: desc: Remove all containers, networks, and volumes @@ -177,25 +182,25 @@ tasks: desc: Start clamav service dir: tdrs-backend cmds: - - docker-compose -f docker-compose.yml up -d clamav-rest + - docker compose -f docker-compose.yml up -d clamav-rest frontend-up: desc: Start frontend web server dir: tdrs-frontend cmds: - - docker-compose -f docker-compose.yml up -d + - docker compose -f docker-compose.yml up -d frontend-down: desc: Stop frontend web server dir: tdrs-frontend cmds: - - docker-compose -f docker-compose.yml down + - docker compose -f docker-compose.yml down frontend-restart: desc: Restart frontend web server dir: tdrs-frontend cmds: - - docker-compose -f docker-compose.yml restart + - docker compose -f docker-compose.yml restart frontend-av: desc: Start frontend with optional clamav service @@ -210,43 +215,43 @@ tasks: desc: Initialize the frontend project dir: tdrs-frontend cmds: - - docker-compose -f docker-compose.yml up -d --build - - docker-compose -f docker-compose.yml exec tdp-frontend sh -c "apk add nodejs npm" - - docker-compose -f docker-compose.yml exec tdp-frontend sh -c "npm install" - - docker-compose -f docker-compose.yml down + - docker compose -f docker-compose.yml up -d --build + - docker compose -f docker-compose.yml exec tdp-frontend sh -c "apk add nodejs npm" + - docker compose -f docker-compose.yml exec tdp-frontend sh -c "npm install" + - docker compose -f docker-compose.yml 
down
 
  frontend-test:
    desc: Run frontend tests
    dir: tdrs-frontend
    cmds:
-      - docker-compose -f docker-compose.local.yml up tdp-frontend-test -d
-      - docker-compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test"
+      - docker compose -f docker-compose.local.yml up tdp-frontend-test -d
+      - docker compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test"
 
  frontend-test-cov:
    desc: Run frontend tests with coverage
    dir: tdrs-frontend
    cmds:
-      - docker-compose -f docker-compose.local.yml up tdp-frontend-test -d
-      - docker-compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test:cov"
+      - docker compose -f docker-compose.local.yml up tdp-frontend-test -d
+      - docker compose -f docker-compose.local.yml exec tdp-frontend-test sh -c "npm run test:cov"
 
  frontend-lint:
    desc: Run eslint in the frontend container
    dir: tdrs-frontend
    cmds:
-      - docker-compose -f docker-compose.local.yml up -d tdp-frontend-test --quiet-pull
-      - docker-compose -f docker-compose.yml exec tdp-frontend-test sh -c "npm run lint"
+      - docker compose -f docker-compose.local.yml up -d tdp-frontend-test --quiet-pull
+      - docker compose -f docker-compose.yml exec -T tdp-frontend-test sh -c "npm run lint"
 
  frontend-logs:
    desc: Show and follow frontend web server logs
    dir: tdrs-frontend
    cmds:
-      - docker-compose -f docker-compose.yml logs -f
+      - docker compose -f docker-compose.yml logs -f
 
  frontend-bash:
    desc: Open a shell in the frontend container
    dir: tdrs-frontend
    cmds:
-      - docker-compose -f docker-compose.yml exec tdp-frontend bash
+      - docker compose -f docker-compose.yml exec tdp-frontend bash
 
  up:
    desc: Start both frontend and backend web servers
diff --git a/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md b/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md
new file mode 100644
index 000000000..931bceb47
--- /dev/null
+++ b/docs/Technical-Documentation/tech-memos/priotitized-errors/prioritized-errors.md
@@ -0,0 +1,100 @@
+# TDP Prioritized Parser Errors
+
+**Audience**: TDP Software Engineers
+**Subject**: Prioritized Errors
+**Date**: October 20, 2024
+
+## Summary
+This technical memorandum provides a suggested path to implement a set of new requirements OFA has generated to address the sheer number of parser errors generated during an STT's data submission. OFA has indicated that some errors are of a lower priority for STTs to review and correct. Thus, the OFA team has requested that a "critical" designation be assigned to parser errors so that the report STTs receive is filtered down to only the critical errors that must be reviewed and fixed. Regardless of how errors are prioritized, STTs will still retain the ability to see a summary of all errors detected in the error report.
+
+## Background
+Currently, error reports are generated in the TDP backend via the `get_xls_serialized_file` function, which accepts a serialized queryset of the appropriate `ParserError`s, writes an XLSX file, and returns it to the user. Apart from the lack of prioritization in the report this function generates, it also introduces the possibility of an out of memory (OOM) error. This can occur because the Django model serializer brings the entire queryset into memory to serialize it into JSON.
Because these `ParserError` querysets can be very large (hundreds of thousands of records), we will also alleviate the memory pressure `get_xls_serialized_file` introduces by removing the Django model serializer and making use of queryset pagination.
+
+## Out of Scope
+Current requirements from OFA do not require category two errors to be queryable by value and expected value. That feature is out of scope for this tech memo and would require more design and implementation work.
+
+## Method/Design
+Given the current OFA requirements, we can implement prioritized/critical errors and memory-efficient report generation without too much work. OFA has provided [this OneNote](https://gorafttech.sharepoint.com/:o:/s/TDRSResearchDesign/EnIa1Mn4v7pOskW7BLomXhIBxUMlYLRU_f1C0dxemW7dWw?e=m0rNyI) document which outlines the error types, errors, and fields that are most important/prioritized for STTs to see.
+
+### Memory Efficient Report Generation
+As previously mentioned in the Background section, `get_xls_serialized_file` serializes parser errors into an XLSX file in a way that requires the entire queryset of parser errors to be brought into memory. Because these querysets can be very large, having them in memory regularly kills Gunicorn workers with an OOM error. To remedy the issue, this tech memo suggests updating `get_xls_serialized_file` to drop the Django model serializers and instead leverage the power of Django querysets and pagination. To accomplish this, instead of passing a JSON-serialized queryset to `get_xls_serialized_file`, a standard (un-evaluated) queryset should be passed. Then, the body of the `get_xls_serialized_file` function should be updated appropriately to use a queryset object instead of a JSON object to generate the XLSX spreadsheet. The updates should also include paginating the queryset to avoid bringing the entirety of the queryset into memory at any one time. The code snippet below provides an example of paginating the queryset and writing the appropriate fields of each entry to the XLSX report.
+
+```python
+paginator = Paginator(parser_errors.order_by('pk'), settings.BULK_CREATE_BATCH_SIZE)
+row_idx = 6
+for page in paginator:
+    for record in page.object_list:
+        rpt_month_year = getattr(record, 'rpt_month_year', None)
+        rpt_month_year = str(rpt_month_year) if rpt_month_year else ""
+        fields_json = getattr(record, 'fields_json', {})
+
+        worksheet.write(row_idx, 0, record.case_number)
+        worksheet.write(row_idx, 1, rpt_month_year[:4])
+        worksheet.write(row_idx, 2, calendar.month_name[int(rpt_month_year[4:])] if rpt_month_year[4:] else None)
+        worksheet.write(row_idx, 3, format_error_msg(record.error_message, fields_json))
+        worksheet.write(row_idx, 4, record.item_number)
+        worksheet.write(row_idx, 5, friendly_names(fields_json))
+        worksheet.write(row_idx, 6, internal_names(fields_json))
+        worksheet.write(row_idx, 7, record.row_number)
+        worksheet.write(row_idx, 8, str(ParserErrorCategoryChoices(record.error_type).label))
+        row_idx += 1
+```
+
+The three helper functions used to write the appropriate fields, `format_error_msg`, `friendly_names`, and `internal_names`, can be seen below, after a short illustration of the `fields_json` structure they consume.
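+
+The `fields_json` blob is assumed to carry a `friendly_name` mapping from internal field names to their user-facing names; the values below are made up for illustration:
+
+```python
+# Illustrative shape of fields_json (hypothetical values)
+fields_json = {
+    'friendly_name': {
+        'FAMILY_AFFILIATION': 'Family Affiliation',
+        'CITIZENSHIP_STATUS': 'Citizenship/Immigration Status',
+    }
+}
+
+# internal_names(fields_json) -> 'FAMILY_AFFILIATION,CITIZENSHIP_STATUS'
+# friendly_names(fields_json) -> 'Family Affiliation,Citizenship/Immigration Status'
+# format_error_msg('FAMILY_AFFILIATION is required', fields_json)
+#   -> 'Family Affiliation is required'
+```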
+
+```python
+def format_error_msg(error_msg, fields_json):
+    """Format error message."""
+    for key, value in fields_json['friendly_name'].items():
+        error_msg = error_msg.replace(key, value) if value else error_msg
+    return error_msg
+
+
+def friendly_names(fields_json):
+    """Return comma separated string of friendly names."""
+    return ','.join([i for i in fields_json['friendly_name'].values()])
+
+
+def internal_names(fields_json):
+    """Return comma separated string of internal names."""
+    return ','.join([i for i in fields_json['friendly_name'].keys()])
+```
+
+### Prioritized/Critical Errors
+[This OneNote](https://gorafttech.sharepoint.com/:o:/s/TDRSResearchDesign/EnIa1Mn4v7pOskW7BLomXhIBxUMlYLRU_f1C0dxemW7dWw?e=m0rNyI) is invaluable to the implementation of prioritized errors. Prioritizing errors could be a very large and technically challenging feature involving new migrations, validation/validator refactors, etc. However, all of that can be avoided thanks to a key insight about the category two and category three validators that follows from OFA's requirements for them. For the category two case, the OneNote document generically specifies category two validation surrounding Family Affiliation, Citizenship Status, and Closure Reason. Further discussion with OFA indicated that it is important/a priority for an STT to see all category two errors encompassing these fields in their entirety. That makes prioritizing these category two errors extremely easy because there is no need to query those fields by specific values or expected values. The queries below provide a complete implementation to query all category two errors encompassing those fields.
+
+```python
+# All cat2 errors associated with FAMILY_AFFILIATION and (CITIZENSHIP_STATUS or CLOSURE_REASON)
+second_field = "CITIZENSHIP_STATUS" if is_active else "CLOSURE_REASON"
+field_query = Q(field_name="FAMILY_AFFILIATION") | Q(field_name=second_field)
+filtered_errors = filtered_errors.union(all_errors.filter(
+    field_query,
+    error_type=ParserErrorCategoryChoices.FIELD_VALUE
+    ))
+```
+
+The key insight for the category three case is less obvious. Looking at the OneNote, it seems as though we might need to query errors based on field name(s), expected value, and actual value. However, for category three errors that information is encoded into the error by its very existence. For example, the OneNote indicates that a high priority error an STT should see in their report is `If fam affil = 1 then SSN must be valid`. This exact error and its values (expected and given) can be uniquely found in any of the active or closed case record schemas. E.g.:
+
+```python
+category3.ifThenAlso(
+    condition_field_name='FAMILY_AFFILIATION',
+    condition_function=category3.isEqual(1),
+    result_field_name='SSN',
+    result_function=category3.validateSSN(),
+)
+```
+
+The existence of this error, with these fields, is uniquely defined in the appropriate schemas. The same can be said for the remaining critical category three errors. Thus, to define the high priority errors we need only know the required field(s) and their error type. Given those pieces of information, queries of the form below can be used to filter STT error reports to only show the highest priority errors.
+
+```python
+errors.filter(fields_json__friendly_name__has_keys=[FIELD_NAME, FIELD_NAME, ETC...],
+              error_type=ParserErrorCategoryChoices.VALUE_CONSISTENCY)
+```
+
+By unioning the category two queries from above with the remainder of the category three queries, a queryset containing only the critical errors can be generated and subsequently passed to `get_xls_serialized_file` to generate and return the prioritized error report to the requesting STT.
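+
+To make the whole flow concrete, the sketch below strings the pieces together into a single helper. This is a minimal sketch, not the final implementation: the helper name `get_critical_errors`, its `is_active` flag, and the two category three field tuples shown are illustrative, and the full set of prioritized field combinations lives in the OneNote.
+
+```python
+from django.db.models import Q
+# Assumes ParserErrorCategoryChoices is in scope (tdpservice.data_files.util).
+
+def get_critical_errors(all_errors, is_active):
+    """Union cat1/cat4 errors with the prioritized cat2 and cat3 errors."""
+    # Category one (pre-check) and four (case consistency) errors are always critical.
+    critical = all_errors.filter(
+        Q(error_type=ParserErrorCategoryChoices.PRE_CHECK) |
+        Q(error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY)
+    )
+
+    # Prioritized category two errors: match on the field name itself.
+    second_field = "CITIZENSHIP_STATUS" if is_active else "CLOSURE_REASON"
+    critical = critical.union(all_errors.filter(
+        Q(field_name="FAMILY_AFFILIATION") | Q(field_name=second_field),
+        error_type=ParserErrorCategoryChoices.FIELD_VALUE,
+    ))
+
+    # Prioritized category three errors: match on the fields encoded in the error.
+    for fields in (("FAMILY_AFFILIATION", "SSN"),
+                   ("FAMILY_AFFILIATION", "CITIZENSHIP_STATUS")):
+        critical = critical.union(all_errors.filter(
+            fields_json__friendly_name__has_keys=list(fields),
+            error_type=ParserErrorCategoryChoices.VALUE_CONSISTENCY,
+        ))
+    return critical
+```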
+
+## Affected Systems
+- TDP backend
+- TDP frontend: latency time incurred while generating report
+
+## Use and Test cases to consider
+- Admin and STT receive the same report
+- Existing tests leveraging ParserError querysets are updated and re-validated for correctness
diff --git a/scripts/git-secrets-check.sh b/scripts/git-secrets-check.sh
index f371f303e..dcfcd7821 100755
--- a/scripts/git-secrets-check.sh
+++ b/scripts/git-secrets-check.sh
@@ -1,29 +1,57 @@
 #!/bin/bash
 set -e
+islocal=$1
 
-if [ -d /tmp/git-secrets ]; then
+if [[ $(uname -s) == "Darwin" ]]; then # Mac OSX check
+    gs_path="/usr/local/bin"
+else # Linux, we're likely running in CircleCI
+    gs_path="/usr/sbin"
+fi
+
+if [ -f "$gs_path/git-secrets" ]; then
     echo The command git-secrets is available
 else
     echo The command git-secrets is not available, cloning...
     git clone git@github.com:awslabs/git-secrets.git /tmp/git-secrets/
     if [ -f /tmp/git-secrets/git-secrets ]; then
-        echo "Moving git secrets into PATH"
-        sudo cp /tmp/git-secrets/git-secrets /usr/sbin/
+
+        echo "Moving git secrets into PATH"
+        sudo cp /tmp/git-secrets/git-secrets $gs_path/
+        $gs_path/git-secrets --install -f
+        rm -rf /tmp/git-secrets #cleanup of clone dir
     else
-        echo "Git clone failed for git-secrets"
+        echo "Git clone failed for git-secrets"
     fi
 fi
 
 # ensure we have correct configs in place
-[ -f ../.gitconfig ]
-cat .gitconfig >> .git/config
-echo "Git-Secrets Config loaded:"
-grep -A10 secrets .git/config
-# grep will return non-zero code if nothing found, failing the build
+if [ -f .gitconfig ]; then
+    cat .gitconfig >> .git/config
+    echo "Git-Secrets Config loaded:"
+    grep -A10 secrets .git/config
+    # grep will return non-zero code if nothing found, failing the build
+fi
 
-echo "git-secrets-check.sh: Scanning repo ..."
-git secrets --scan -r ../
-retVal=$?
+if [ $islocal ]; then
+    echo "git-secrets-check.sh: Scanning files staged for commit ..."
+    setopt shwordsplit
+    staged_files=$(git diff --cached --name-status | grep -vE "D|^R[0-9]+"| cut -f2 | xargs)
+
+    for filename in $staged_files; do
+        echo "git-secrets-check.sh: Scanning $filename ..."
+        git secrets --scan $filename
+        retVal=$?
+        if [[ $retVal -ne 0 ]]; then
+            echo "git-secrets found issues, prevented commit."
+            return 1
+        fi
+    done
+
+else
+    echo "git-secrets-check.sh: Scanning repo ..."
+    git secrets --scan -r ../
+    retVal=$?
+fi
 
 # if there are issues, they will be listed then script will abort here
 if [[ $retVal -eq 0 ]]; then
@@ -32,4 +60,3 @@ else
     echo "git-secrets-check.sh: Issues found with return code $retVal, please remediate."
     return 1
 fi
-
diff --git a/tdrs-backend/Dockerfile b/tdrs-backend/Dockerfile
index 34ef5dd9b..6b908eee6 100644
--- a/tdrs-backend/Dockerfile
+++ b/tdrs-backend/Dockerfile
@@ -9,7 +9,7 @@ ENV DJANGO_SETTINGS_MODULE=tdpservice.settings.local
 ENV DJANGO_CONFIGURATION=Local
 # Allows docker to cache installed dependencies between builds
 COPY Pipfile Pipfile.lock /tdpapp/
-COPY sources.list /etc/apt/sources.list
+# COPY sources.list /etc/apt/sources.list
 WORKDIR /tdpapp/
 # Download latest listing of available packages:
 RUN apt-get -y update
diff --git a/tdrs-backend/tdpservice/data_files/test/test_api.py b/tdrs-backend/tdpservice/data_files/test/test_api.py
index 78685b075..5fb3a0a5c 100644
--- a/tdrs-backend/tdpservice/data_files/test/test_api.py
+++ b/tdrs-backend/tdpservice/data_files/test/test_api.py
@@ -1,4 +1,5 @@
 """Tests for DataFiles Application."""
+import os
 from rest_framework import status
 import pytest
 import base64
@@ -82,62 +83,58 @@ def get_spreadsheet(response):
         """Return error report."""
         decoded_response = base64.b64decode(response.data['xls_report'])
 
+        if os.path.exists('mycls.xlsx'):
+            os.remove('mycls.xlsx')
+
         # write the excel file to disk
         with open('mycls.xlsx', 'wb') as f:
             f.write(decoded_response)
 
         # read the excel file from disk
         wb = openpyxl.load_workbook('mycls.xlsx')
-        ws = wb.get_sheet_by_name('Sheet1')
-        return ws
+        critical = wb['Critical']
+        summary = wb['Summary']
+        return critical, summary
 
     @staticmethod
     def assert_error_report_tanf_file_content_matches_with_friendly_names(response):
         """Assert the error report file contents match expected with friendly names."""
-        ws = DataFileAPITestBase.get_spreadsheet(response)
+        critical, summary = DataFileAPITestBase.get_spreadsheet(response)
 
         COL_ERROR_MESSAGE = 4
+        COL_NUM_OCCURRENCES = 8
 
-        assert ws.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \
+        assert critical.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \
             + "instructions (linked below) when looking up items and allowable values during the data revision process"
 
-        assert ws.cell(row=8, column=COL_ERROR_MESSAGE).value == (
-            "Since Item 21A (Cash Amount) is 873, then Item 21B "
-            "(Cash and Cash Equivalents: Number of Months) 0 must be greater than 0"
-        )
+        assert critical.cell(row=8, column=COL_ERROR_MESSAGE).value == "No records created."
+ assert summary.cell(row=7, column=COL_NUM_OCCURRENCES).value == 1 @staticmethod def assert_error_report_ssp_file_content_matches_with_friendly_names(response): """Assert the error report file contents match expected with friendly names.""" - ws = DataFileAPITestBase.get_spreadsheet(response) + critical, summary = DataFileAPITestBase.get_spreadsheet(response) COL_ERROR_MESSAGE = 4 + COL_NUM_OCCURRENCES = 8 - assert ws.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \ + assert critical.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \ + "instructions (linked below) when looking up items and allowable values during the data revision process" - assert ws.cell(row=7, column=COL_ERROR_MESSAGE).value == ("M1 Item 11 (Receives Subsidized Housing): 3 is " - "not in range [1, 2].") + assert critical.cell(row=7, column=COL_ERROR_MESSAGE).value == ("TRAILER: record length is 15 characters " + "but must be 23.") + assert summary.cell(row=7, column=COL_NUM_OCCURRENCES).value == 5 @staticmethod def assert_error_report_file_content_matches_without_friendly_names(response): """Assert the error report file contents match expected without friendly names.""" - decoded_response = base64.b64decode(response.data['xls_report']) - - # write the excel file to disk - with open('mycls.xlsx', 'wb') as f: - f.write(decoded_response) - - # read the excel file from disk - wb = openpyxl.load_workbook('mycls.xlsx') - ws = wb.get_sheet_by_name('Sheet1') + critical, summary = DataFileAPITestBase.get_spreadsheet(response) COL_ERROR_MESSAGE = 4 + COL_NUM_OCCURRENCES = 8 - assert ws.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \ + assert critical.cell(row=1, column=1).value == "Please refer to the most recent versions of the coding " \ + "instructions (linked below) when looking up items and allowable values during the data revision process" - assert ws.cell(row=8, column=COL_ERROR_MESSAGE).value == ( - "Since Item 21A (Cash Amount) is 873, then Item 21B " - "(Cash and Cash Equivalents: Number of Months) 0 must be greater than 0" - ) + assert critical.cell(row=8, column=COL_ERROR_MESSAGE).value == "No records created." 
+ assert summary.cell(row=7, column=COL_NUM_OCCURRENCES).value == 1 @staticmethod def assert_data_file_exists(data_file_data, version, user): diff --git a/tdrs-backend/tdpservice/data_files/util.py b/tdrs-backend/tdpservice/data_files/util.py index 0d2d7a941..b7cc836b0 100644 --- a/tdrs-backend/tdpservice/data_files/util.py +++ b/tdrs-backend/tdpservice/data_files/util.py @@ -3,54 +3,88 @@ from io import BytesIO import xlsxwriter import calendar -from tdpservice.parsers.models import ParserErrorCategoryChoices +from django.conf import settings +from django.core.paginator import Paginator +from django.db import models +from django.db.models import Count, Q +from django.utils.translation import gettext_lazy as _ -def get_xls_serialized_file(data): - """Return xls file created from the error.""" +class ParserErrorCategoryChoices(models.TextChoices): + """Enum of ParserError error_type.""" + + PRE_CHECK = "1", _("File pre-check") + FIELD_VALUE = "2", _("Record value invalid") + VALUE_CONSISTENCY = "3", _("Record value consistency") + CASE_CONSISTENCY = "4", _("Case consistency") + SECTION_CONSISTENCY = "5", _("Section consistency") + HISTORICAL_CONSISTENCY = "6", _("Historical consistency") + + +def get_prioritized_queryset(parser_errors): + """Generate a prioritized queryset of ParserErrors.""" + PRIORITIZED_CAT2 = ( + ("FAMILY_AFFILIATION", "CITIZENSHIP_STATUS", "CLOSURE_REASON"), + ) + PRIORITIZED_CAT3 = ( + ("FAMILY_AFFILIATION", "SSN"), + ("FAMILY_AFFILIATION", "CITIZENSHIP_STATUS"), + ("AMT_FOOD_STAMP_ASSISTANCE", "AMT_SUB_CC", "CASH_AMOUNT", "CC_AMOUNT", "TRANSP_AMOUNT"), + ("FAMILY_AFFILIATION", "SSN", "CITIZENSHIP_STATUS"), + ("FAMILY_AFFILIATION", "PARENT_MINOR_CHILD"), + ("FAMILY_AFFILIATION", "EDUCATION_LEVEL"), + ("FAMILY_AFFILIATION", "WORK_ELIGIBLE_INDICATOR"), + ("CITIZENSHIP_STATUS", "WORK_ELIGIBLE_INDICATOR"), + ) + + # All cat1/4 errors + error_type_query = Q(error_type=ParserErrorCategoryChoices.PRE_CHECK) | \ + Q(error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY) + filtered_errors = parser_errors.filter(error_type_query) + + for fields in PRIORITIZED_CAT2: + filtered_errors = filtered_errors.union(parser_errors.filter( + field_name__in=fields, + error_type=ParserErrorCategoryChoices.FIELD_VALUE + )) + + for fields in PRIORITIZED_CAT3: + filtered_errors = filtered_errors.union(parser_errors.filter( + fields_json__friendly_name__has_keys=fields, + error_type=ParserErrorCategoryChoices.VALUE_CONSISTENCY + )) + + return filtered_errors + + +def format_error_msg(error_msg, fields_json): + """Format error message.""" + for key, value in fields_json['friendly_name'].items(): + error_msg = error_msg.replace(key, value) if value else error_msg + return error_msg + + +def friendly_names(fields_json): + """Return comma separated string of friendly names.""" + return ','.join([i for i in fields_json['friendly_name'].values()]) + + +def internal_names(fields_json): + """Return comma separated string of internal names.""" + return ','.join([i for i in fields_json['friendly_name'].keys()]) - def chk(x): - """Check if fields_json is not None.""" - x['fields_json'] = x['fields_json'] if x.get('fields_json', None) else { - 'friendly_name': { - x['field_name']: x['field_name'] - }, - } - x['fields_json']['friendly_name'] = x['fields_json']['friendly_name'] if x['fields_json'].get( - 'friendly_name', None) else { - x['field_name']: x['field_name'] - } - if None in x['fields_json']['friendly_name'].keys(): - x['fields_json']['friendly_name'].pop(None) - if None in 
x['fields_json']['friendly_name'].values(): - x['fields_json']['friendly_name'].pop() - return x - - def format_error_msg(x): - """Format error message.""" - error_msg = x['error_message'] - for key, value in x['fields_json']['friendly_name'].items(): - error_msg = error_msg.replace(key, value) if value else error_msg - return error_msg +def check_fields_json(fields_json, field_name): + """If fields_json is None, impute field name to avoid NoneType errors.""" + if not fields_json: + child_dict = {field_name: field_name} if field_name else {} + fields_json = {'friendly_name': child_dict} + return fields_json + + +def write_worksheet_banner(worksheet): + """Write worksheet banner.""" row, col = 0, 0 - output = BytesIO() - workbook = xlsxwriter.Workbook(output) - worksheet = workbook.add_worksheet() - - report_columns = [ - ('case_number', lambda x: x['case_number']), - ('year', lambda x: str(x['rpt_month_year'])[0:4] if x['rpt_month_year'] else None), - ('month', lambda x: calendar.month_name[ - int(str(x['rpt_month_year'])[4:]) - ] if x['rpt_month_year'] else None), - ('error_message', lambda x: format_error_msg(chk(x))), - ('item_number', lambda x: x['item_number']), - ('item_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].values()])), - ('internal_variable_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].keys()])), - ('row_number', lambda x: x['row_number']), - ('error_type', lambda x: str(ParserErrorCategoryChoices(x['error_type']).label)), - ] # write beta banner worksheet.write( @@ -81,26 +115,99 @@ def format_error_msg(x): string='Visit the Knowledge Center for further guidance on reviewing error reports' ) - row, col = 5, 0 - # write csv header - bold = workbook.add_format({'bold': True}) +def format_header(header_list: list): + """Format header.""" + return ' '.join([i.capitalize() for i in header_list.split('_')]) + + +def write_prioritized_errors(worksheet, prioritized_errors, bold): + """Write prioritized errors to spreadsheet.""" + row, col = 5, 0 - def format_header(header_list: list): - """Format header.""" - return ' '.join([i.capitalize() for i in header_list.split('_')]) + # We will write the headers in the first row + columns = ['case_number', 'year', 'month', + 'error_message', 'item_number', 'item_name', + 'internal_variable_name', 'row_number', 'error_type', + ] + for idx, col in enumerate(columns): + worksheet.write(row, idx, format_header(col), bold) + + paginator = Paginator(prioritized_errors.order_by('pk'), settings.BULK_CREATE_BATCH_SIZE) + row_idx = 6 + for page in paginator: + for record in page.object_list: + rpt_month_year = getattr(record, 'rpt_month_year', None) + rpt_month_year = str(rpt_month_year) if rpt_month_year else "" + + fields_json = check_fields_json(getattr(record, 'fields_json', {}), record.field_name) + + worksheet.write(row_idx, 0, record.case_number) + worksheet.write(row_idx, 1, rpt_month_year[:4]) + worksheet.write(row_idx, 2, calendar.month_name[int(rpt_month_year[4:])] if rpt_month_year[4:] else None) + worksheet.write(row_idx, 3, format_error_msg(record.error_message, fields_json)) + worksheet.write(row_idx, 4, record.item_number) + worksheet.write(row_idx, 5, friendly_names(fields_json)) + worksheet.write(row_idx, 6, internal_names(fields_json)) + worksheet.write(row_idx, 7, record.row_number) + worksheet.write(row_idx, 8, str(ParserErrorCategoryChoices(record.error_type).label)) + row_idx += 1 + + +def write_aggregate_errors(worksheet, all_errors, bold): + """Aggregate by error 
message and write.""" + row, col = 5, 0 # We will write the headers in the first row - [worksheet.write(row, col, format_header(key[0]), bold) for col, key in enumerate(report_columns)] + columns = ['year', 'month', 'error_message', 'item_number', 'item_name', + 'internal_variable_name', 'error_type', 'number_of_occurrences' + ] + for idx, col in enumerate(columns): + worksheet.write(row, idx, format_header(col), bold) + + aggregates = all_errors.values('rpt_month_year', 'error_message', + 'item_number', 'field_name', + 'fields_json', 'error_type').annotate(num_occurrences=Count('error_message')) + + paginator = Paginator(aggregates.order_by('-num_occurrences'), settings.BULK_CREATE_BATCH_SIZE) + row_idx = 6 + for page in paginator: + for record in page.object_list: + rpt_month_year = record['rpt_month_year'] + rpt_month_year = str(rpt_month_year) if rpt_month_year else "" + + fields_json = check_fields_json(record['fields_json'], record['field_name']) + + worksheet.write(row_idx, 0, rpt_month_year[:4]) + worksheet.write(row_idx, 1, calendar.month_name[int(rpt_month_year[4:])] if rpt_month_year[4:] else None) + worksheet.write(row_idx, 2, format_error_msg(record['error_message'], fields_json)) + worksheet.write(row_idx, 3, record['item_number']) + worksheet.write(row_idx, 4, friendly_names(fields_json)) + worksheet.write(row_idx, 5, internal_names(fields_json)) + worksheet.write(row_idx, 6, str(ParserErrorCategoryChoices(record['error_type']).label)) + worksheet.write(row_idx, 7, record['num_occurrences']) + row_idx += 1 + + +def get_xls_serialized_file(all_errors, prioritized_errors): + """Return xls file created from the error.""" + output = BytesIO() + workbook = xlsxwriter.Workbook(output) + prioritized_sheet = workbook.add_worksheet(name="Critical") + aggregate_sheet = workbook.add_worksheet(name="Summary") - [ - worksheet.write(row + 6, col, key[1](data_i)) for col, key in enumerate(report_columns) - for row, data_i in enumerate(data) - ] + write_worksheet_banner(prioritized_sheet) + write_worksheet_banner(aggregate_sheet) + + bold = workbook.add_format({'bold': True}) + write_prioritized_errors(prioritized_sheet, prioritized_errors, bold) + write_aggregate_errors(aggregate_sheet, all_errors, bold) # autofit all columns except for the first one - worksheet.autofit() - worksheet.set_column(0, 0, 20) + prioritized_sheet.autofit() + prioritized_sheet.set_column(0, 0, 20) + aggregate_sheet.autofit() + aggregate_sheet.set_column(0, 0, 20) workbook.close() - return {"data": data, "xls_report": base64.b64encode(output.getvalue()).decode("utf-8")} + return {"xls_report": base64.b64encode(output.getvalue()).decode("utf-8")} diff --git a/tdrs-backend/tdpservice/data_files/views.py b/tdrs-backend/tdpservice/data_files/views.py index 3f67d7cb3..8263fe62b 100644 --- a/tdrs-backend/tdpservice/data_files/views.py +++ b/tdrs-backend/tdpservice/data_files/views.py @@ -15,13 +15,12 @@ from rest_framework import status from tdpservice.data_files.serializers import DataFileSerializer -from tdpservice.data_files.util import get_xls_serialized_file +from tdpservice.data_files.util import get_xls_serialized_file, get_prioritized_queryset from tdpservice.data_files.models import DataFile, get_s3_upload_path from tdpservice.users.permissions import DataFilePermissions, IsApprovedPermission from tdpservice.scheduling import parser_task from tdpservice.data_files.s3_client import S3Client from tdpservice.parsers.models import ParserError -from tdpservice.parsers.serializers import ParsingErrorSerializer logger 
= logging.getLogger(__name__) @@ -147,9 +146,10 @@ def download(self, request, pk=None): def download_error_report(self, request, pk=None): """Generate and return the parsing error report xlsx.""" datafile = self.get_object() - parser_errors = ParserError.objects.all().filter(file=datafile) - serializer = ParsingErrorSerializer(parser_errors, many=True, context=self.get_serializer_context()) - return Response(get_xls_serialized_file(serializer.data)) + all_errors = ParserError.objects.filter(file=datafile) + filtered_errors = get_prioritized_queryset(all_errors) + + return Response(get_xls_serialized_file(all_errors, filtered_errors)) class GetYearList(APIView): diff --git a/tdrs-backend/tdpservice/parsers/models.py b/tdrs-backend/tdpservice/parsers/models.py index f9c5f3c63..f1e470e6e 100644 --- a/tdrs-backend/tdpservice/parsers/models.py +++ b/tdrs-backend/tdpservice/parsers/models.py @@ -2,24 +2,15 @@ import datetime from django.db import models -from django.utils.translation import gettext_lazy as _ from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from tdpservice.data_files.models import DataFile +from tdpservice.data_files.util import ParserErrorCategoryChoices + import logging logger = logging.getLogger(__name__) -class ParserErrorCategoryChoices(models.TextChoices): - """Enum of ParserError error_type.""" - - PRE_CHECK = "1", _("File pre-check") - FIELD_VALUE = "2", _("Record value invalid") - VALUE_CONSISTENCY = "3", _("Record value consistency") - CASE_CONSISTENCY = "4", _("Case consistency") - SECTION_CONSISTENCY = "5", _("Section consistency") - HISTORICAL_CONSISTENCY = "6", _("Historical consistency") - class ParserError(models.Model): """Model representing a parser error.""" @@ -139,7 +130,7 @@ def get_status(self): return DataFileSummary.Status.REJECTED elif errors.count() == 0: return DataFileSummary.Status.ACCEPTED - elif row_precheck_errors.count() > 0 or case_consistency_errors.count() > 0: + elif (row_precheck_errors.count() > 0 or case_consistency_errors.count()): return DataFileSummary.Status.PARTIALLY_ACCEPTED else: return DataFileSummary.Status.ACCEPTED_WITH_ERRORS diff --git a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py index 82d5c2c46..20edf6fdb 100644 --- a/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py +++ b/tdrs-backend/tdpservice/parsers/schema_defs/ssp/m2.py @@ -317,7 +317,7 @@ startIndex=48, endIndex=49, required=False, - validators=[category2.isGreaterThan(0)] + validators=[category2.isGreaterThan(0, inclusive=True)] ), Field( item="32E", diff --git a/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py b/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py index 8f9aba575..9dc92acd1 100644 --- a/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py +++ b/tdrs-backend/tdpservice/parsers/schema_defs/tanf/t1.py @@ -66,12 +66,6 @@ result_field_name="WORK_REQ_SANCTION", result_function=category3.isOneOf((1, 2)), ), - category3.ifThenAlso( - condition_field_name="SANC_REDUCTION_AMT", - condition_function=category3.isGreaterThan(0), - result_field_name="FAMILY_SANC_ADULT", - result_function=category3.isOneOf((1, 2)), - ), category3.ifThenAlso( condition_field_name="SANC_REDUCTION_AMT", condition_function=category3.isGreaterThan(0), @@ -635,7 +629,7 @@ endIndex=114, required=False, validators=[ - category2.isOneOf(["9", " "]), + category2.isOneOf(["9", "0", " "]), category2.isAlphaNumeric(), ], ), @@ -658,7 +652,7 
@@ endIndex=117, required=False, validators=[ - category2.isOneOf([1, 2]), + category2.isOneOf([0, 1, 2]), ], ), Field( diff --git a/tdrs-backend/tdpservice/stts/models.py b/tdrs-backend/tdpservice/stts/models.py index b883ded74..b960d0e55 100644 --- a/tdrs-backend/tdpservice/stts/models.py +++ b/tdrs-backend/tdpservice/stts/models.py @@ -4,6 +4,9 @@ from django.db.models import constraints +DEFAULT_NUMBER_OF_SECTIONS = 4 + + class Region(models.Model): """A model representing a US region.""" @@ -39,6 +42,14 @@ class EntityType(models.TextChoices): ssp = models.BooleanField(default=False, null=True) sample = models.BooleanField(default=False, null=True) + @property + def num_sections(self): + """The number of sections this STT submits.""" + if self.filenames is None: + return DEFAULT_NUMBER_OF_SECTIONS + divisor = int(self.ssp) + 1 + return len(self.filenames) // divisor + class Meta: """Metadata.""" diff --git a/tdrs-backend/tdpservice/stts/serializers.py b/tdrs-backend/tdpservice/stts/serializers.py index be2ec88b6..7774e87ab 100644 --- a/tdrs-backend/tdpservice/stts/serializers.py +++ b/tdrs-backend/tdpservice/stts/serializers.py @@ -14,7 +14,7 @@ class Meta: """Metadata.""" model = STT - fields = ["id", "type", "postal_code", "name", "region", "filenames", "stt_code", "ssp",] + fields = ["id", "type", "postal_code", "name", "region", "filenames", "stt_code", "ssp", "num_sections"] def get_postal_code(self, obj): """Return the state postal_code.""" diff --git a/tdrs-backend/tdpservice/users/models.py b/tdrs-backend/tdpservice/users/models.py index 40f8dc900..3cf094264 100644 --- a/tdrs-backend/tdpservice/users/models.py +++ b/tdrs-backend/tdpservice/users/models.py @@ -118,9 +118,11 @@ def __str__(self): """Return the username as the string representation of the object.""" return self.username - def is_in_group(self, group_name: str) -> bool: - """Return whether or not the user is a member of the specified Group.""" - return self.groups.filter(name=group_name).exists() + def is_in_group(self, group_names: list) -> bool: + """Return whether or not the user is a member of the specified Group(s).""" + if type(group_names) == str: + group_names = [group_names] + return self.groups.filter(name__in=group_names).exists() def validate_location(self): """Throw a validation error if a user has a location type incompatable with their role.""" @@ -180,6 +182,11 @@ def is_ocio_staff(self) -> bool: """Return whether or not the user is in the ACF OCIO Group.""" return self.is_in_group("ACF OCIO") + @property + def is_an_admin(self) -> bool: + """Return whether or not the user is in the OFA Admin Group or OFA System Admin.""" + return self.is_in_group(["OFA Admin", "OFA System Admin"]) + @property def is_ofa_sys_admin(self) -> bool: """Return whether or not the user is in the OFA System Admin Group.""" diff --git a/tdrs-frontend/src/actions/reports.js b/tdrs-frontend/src/actions/reports.js index 8ecb8839e..766aafc7f 100644 --- a/tdrs-frontend/src/actions/reports.js +++ b/tdrs-frontend/src/actions/reports.js @@ -4,6 +4,7 @@ import axios from 'axios' import axiosInstance from '../axios-instance' import { logErrorToServer } from '../utils/eventLogger' import removeFileInputErrorState from '../utils/removeFileInputErrorState' +import { fileUploadSections } from '../reducers/reports' const BACKEND_URL = process.env.REACT_APP_BACKEND_URL diff --git a/tdrs-frontend/src/actions/reports.test.js b/tdrs-frontend/src/actions/reports.test.js index 40593f3bb..294e31c9a 100644 --- 
a/tdrs-frontend/src/actions/reports.test.js +++ b/tdrs-frontend/src/actions/reports.test.js @@ -241,6 +241,18 @@ describe('actions/reports', () => { }) }) + it('should dispatch SET_SELECTED_STT with empty stt', async () => { + const store = mockStore() + + await store.dispatch(setStt('')) + + const actions = store.getActions() + expect(actions[0].type).toBe(SET_SELECTED_STT) + expect(actions[0].payload).toStrictEqual({ + stt: '', + }) + }) + it('should dispatch SET_SELECTED_QUARTER', async () => { const store = mockStore() diff --git a/tdrs-frontend/src/components/Reports/Reports.jsx b/tdrs-frontend/src/components/Reports/Reports.jsx index a22ae4fb1..0ac0f3d98 100644 --- a/tdrs-frontend/src/components/Reports/Reports.jsx +++ b/tdrs-frontend/src/components/Reports/Reports.jsx @@ -455,7 +455,7 @@ function Reports() { {selectedSubmissionTab === 1 && ( { setIsToggled(false) resetPreviousValues() diff --git a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx index a1e28b7c0..b768fb7cd 100644 --- a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx +++ b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx @@ -1,10 +1,9 @@ import React from 'react' import PropTypes from 'prop-types' -import classNames from 'classnames' import { useDispatch, useSelector } from 'react-redux' -import { fileUploadSections } from '../../reducers/reports' import Paginator from '../Paginator' import { getAvailableFileList } from '../../actions/reports' +import { fileUploadSections } from '../../reducers/reports' import { useEffect } from 'react' import { useState } from 'react' import { CaseAggregatesTable } from './CaseAggregatesTable' @@ -64,6 +63,7 @@ const SubmissionHistory = ({ filterValues }) => { const dispatch = useDispatch() const [hasFetchedFiles, setHasFetchedFiles] = useState(false) const { files } = useSelector((state) => state.reports) + const num_sections = filterValues.stt.num_sections useEffect(() => { if (!hasFetchedFiles) { @@ -87,15 +87,17 @@ const SubmissionHistory = ({ filterValues }) => { - {fileUploadSections.map((section, index) => ( - f.section.includes(section))} - /> - ))} + {fileUploadSections.slice(0, num_sections).map((section, index) => { + return ( + f.section.includes(section))} + /> + ) + })} > ) diff --git a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.test.js b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.test.js index 325c7d898..eda2d13b8 100644 --- a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.test.js +++ b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.test.js @@ -18,7 +18,7 @@ describe('SubmissionHistory', () => { const defaultFilterValues = { quarter: 'Q1', year: '2023', - stt: { id: 4 }, + stt: { id: 5 }, file_type: 'TANF', } @@ -324,7 +324,7 @@ describe('SubmissionHistory', () => { setup(store, { ...defaultFilterValues, - stt: { id: 48 }, + stt: { id: 5 }, file_type: 'SSP', }) diff --git a/tdrs-frontend/src/components/UploadReport/UploadReport.jsx b/tdrs-frontend/src/components/UploadReport/UploadReport.jsx index 9e51c11a7..a2348fe65 100644 --- a/tdrs-frontend/src/components/UploadReport/UploadReport.jsx +++ b/tdrs-frontend/src/components/UploadReport/UploadReport.jsx @@ -7,8 +7,8 @@ import Button from '../Button' import FileUpload from '../FileUpload' import { submit } from '../../actions/reports' -import { useEventLogger } from '../../utils/eventLogger' import { 
fileUploadSections } from '../../reducers/reports' +import { useEventLogger } from '../../utils/eventLogger' function UploadReport({ handleCancel, stt }) { // The currently selected year from the reportingYears dropdown @@ -20,9 +20,14 @@ function UploadReport({ handleCancel, stt }) { // The set of uploaded files in our Redux state const files = useSelector((state) => state.reports.submittedFiles) + // The logged in user in our Redux state const user = useSelector((state) => state.auth.user) + // The number of sections this stt submits data for and it's ID + const stt_id = stt?.id + const num_sections = stt === undefined ? 4 : stt.num_sections + // TODO: Move this to Redux state so we can modify this value outside of // this component without having to pass the setter function around const [localAlert, setLocalAlertState] = useState({ @@ -70,7 +75,7 @@ function UploadReport({ handleCancel, stt }) { formattedSections, logger, setLocalAlertState, - stt, + stt: stt_id, uploadedFiles, user, ssp: selectedFileType === 'ssp-moe', @@ -105,13 +110,15 @@ function UploadReport({ handleCancel, stt }) { )} - {fileUploadSections.map((name, index) => ( - - ))} + {fileUploadSections.slice(0, num_sections).map((section, index) => { + return ( + + ) + })} @@ -129,7 +136,7 @@ function UploadReport({ handleCancel, stt }) { UploadReport.propTypes = { handleCancel: PropTypes.func.isRequired, - stt: PropTypes.number, + stt: PropTypes.object, } export default UploadReport diff --git a/tdrs-frontend/src/reducers/reports.test.js b/tdrs-frontend/src/reducers/reports.test.js index c96ca88e2..6e41b21f7 100644 --- a/tdrs-frontend/src/reducers/reports.test.js +++ b/tdrs-frontend/src/reducers/reports.test.js @@ -4,6 +4,7 @@ import { CLEAR_ERROR, SET_FILE, CLEAR_FILE, + FILE_EXT_ERROR, SET_FILE_ERROR, SET_SELECTED_YEAR, SET_SELECTED_STT, @@ -267,6 +268,52 @@ describe('reducers/reports', () => { }) }) + it('should handle FILE_EXT_ERROR', () => { + expect( + reducer(undefined, { + type: FILE_EXT_ERROR, + payload: { + error: { message: 'Test invalid ext.' }, + section: 'Active Case Data', + }, + }) + ).toEqual({ + ...initialState, + submittedFiles: [ + { + id: null, + file: null, + section: 'Active Case Data', + fileName: undefined, + error: { message: 'Test invalid ext.' }, + uuid: null, + fileType: null, + }, + { + section: 'Closed Case Data', + fileName: null, + error: null, + uuid: null, + fileType: null, + }, + { + section: 'Aggregate Data', + fileName: null, + error: null, + uuid: null, + fileType: null, + }, + { + section: 'Stratum Data', + fileName: null, + error: null, + uuid: null, + fileType: null, + }, + ], + }) + }) + it('should handle SET_FILE_ERROR', () => { const fakeError = new Error({ message: 'something went wrong' }) expect(