diff --git a/tdrs-backend/tdpservice/parsers/aggregates.py b/tdrs-backend/tdpservice/parsers/aggregates.py index 8f27a7445..6f35f964b 100644 --- a/tdrs-backend/tdpservice/parsers/aggregates.py +++ b/tdrs-backend/tdpservice/parsers/aggregates.py @@ -55,3 +55,28 @@ def case_aggregates_by_month(df, dfs_status): aggregate_data['rejected'] = ParserError.objects.filter(file=df).filter(case_number=None).count() return aggregate_data + + +def total_errors_by_month(df, dfs_status): + """Return total errors for each month in the reporting period.""" + calendar_year, calendar_qtr = fiscal_to_calendar(df.year, df.quarter) + month_list = transform_to_months(calendar_qtr) + + total_errors_data = {"months": []} + + errors = ParserError.objects.all().filter(file=df) + + for month in month_list: + if dfs_status == "Rejected": + total_errors_data["months"].append( + {"month": month, "total_errors": "N/A"}) + continue + + month_int = month_to_int(month) + rpt_month_year = int(f"{calendar_year}{month_int}") + + error_count = errors.filter(rpt_month_year=rpt_month_year).count() + total_errors_data["months"].append( + {"month": month, "total_errors": error_count}) + + return total_errors_data diff --git a/tdrs-backend/tdpservice/parsers/test/data/tanf_section4_with_errors.txt b/tdrs-backend/tdpservice/parsers/test/data/tanf_section4_with_errors.txt new file mode 100644 index 000000000..53590eccf --- /dev/null +++ b/tdrs-backend/tdpservice/parsers/test/data/tanf_section4_with_errors.txt @@ -0,0 +1,3 @@ +HEADER20204S06 TAN1 N +T720204700006853700680540068454103000312400000000003180104000347400036460003583106000044600004360000325299000506200036070003385202000039100002740000499 +TRAILER0000001 \ No newline at end of file diff --git a/tdrs-backend/tdpservice/parsers/test/test_parse.py b/tdrs-backend/tdpservice/parsers/test/test_parse.py index 7006286e6..24e49f5a7 100644 --- a/tdrs-backend/tdpservice/parsers/test/test_parse.py +++ b/tdrs-backend/tdpservice/parsers/test/test_parse.py @@ -46,7 
+46,6 @@ def test_parse_small_correct_file(test_datafile, dfs): test_datafile.quarter = 'Q1' test_datafile.save() dfs.datafile = test_datafile - dfs.save() parse.parse_datafile(test_datafile) @@ -85,7 +84,6 @@ def test_parse_section_mismatch(test_datafile, dfs): test_datafile.save() dfs.datafile = test_datafile - dfs.save() errors = parse.parse_datafile(test_datafile) dfs.status = dfs.get_status() @@ -126,7 +124,6 @@ def test_parse_wrong_program_type(test_datafile, dfs): test_datafile.save() dfs.datafile = test_datafile - dfs.save() errors = parse.parse_datafile(test_datafile) assert dfs.get_status() == DataFileSummary.Status.REJECTED @@ -159,7 +156,6 @@ def test_parse_big_file(test_big_file, dfs): expected_t3_record_count = 1376 dfs.datafile = test_big_file - dfs.save() parse.parse_datafile(test_big_file) dfs.status = dfs.get_status() @@ -236,7 +232,6 @@ def test_parse_bad_file_missing_header(bad_file_missing_header, dfs): """Test parsing of bad_missing_header.""" errors = parse.parse_datafile(bad_file_missing_header) dfs.datafile = bad_file_missing_header - dfs.save() assert dfs.get_status() == DataFileSummary.Status.REJECTED parser_errors = ParserError.objects.filter(file=bad_file_missing_header).order_by('created_at') @@ -269,7 +264,6 @@ def test_parse_bad_file_multiple_headers(bad_file_multiple_headers, dfs): bad_file_multiple_headers.save() errors = parse.parse_datafile(bad_file_multiple_headers) dfs.datafile = bad_file_multiple_headers - dfs.save() assert dfs.get_status() == DataFileSummary.Status.REJECTED parser_errors = ParserError.objects.filter(file=bad_file_multiple_headers) @@ -322,7 +316,6 @@ def test_parse_bad_trailer_file(bad_trailer_file, dfs): bad_trailer_file.year = 2021 bad_trailer_file.quarter = 'Q1' dfs.datafile = bad_trailer_file - dfs.save() errors = parse.parse_datafile(bad_trailer_file) @@ -408,7 +401,6 @@ def empty_file(stt_user, stt): def test_parse_empty_file(empty_file, dfs): """Test parsing of empty_file.""" dfs.datafile = empty_file 
- dfs.save() errors = parse.parse_datafile(empty_file) dfs.status = dfs.get_status() @@ -461,7 +453,7 @@ def test_parse_small_ssp_section1_datafile(small_ssp_section1_datafile, dfs): expected_m3_record_count = 8 dfs.datafile = small_ssp_section1_datafile - dfs.save() + parse.parse_datafile(small_ssp_section1_datafile) parser_errors = ParserError.objects.filter(file=small_ssp_section1_datafile) @@ -536,7 +528,6 @@ def test_parse_tanf_section1_datafile(small_tanf_section1_datafile, dfs): small_tanf_section1_datafile.year = 2021 small_tanf_section1_datafile.quarter = 'Q1' dfs.datafile = small_tanf_section1_datafile - dfs.save() parse.parse_datafile(small_tanf_section1_datafile) @@ -713,7 +704,6 @@ def test_parse_bad_tfs1_missing_required(bad_tanf_s1__row_missing_required_field bad_tanf_s1__row_missing_required_field.quarter = 'Q1' dfs.datafile = bad_tanf_s1__row_missing_required_field - dfs.save() parse.parse_datafile(bad_tanf_s1__row_missing_required_field) @@ -818,11 +808,9 @@ def test_dfs_set_case_aggregates(test_datafile, dfs): # this still needs to execute to create db objects to be queried parse.parse_datafile(test_datafile) dfs.file = test_datafile - dfs.save() dfs.status = dfs.get_status() dfs.case_aggregates = aggregates.case_aggregates_by_month( test_datafile, dfs.status) - dfs.save() for month in dfs.case_aggregates['months']: if month['month'] == 'Oct': @@ -938,13 +926,26 @@ def tanf_section3_file(stt_user, stt): @pytest.mark.django_db() -def test_parse_tanf_section3_file(tanf_section3_file): +def test_parse_tanf_section3_file(tanf_section3_file, dfs): """Test parsing TANF Section 3 submission.""" tanf_section3_file.year = 2021 tanf_section3_file.quarter = 'Q1' + dfs.datafile = tanf_section3_file + parse.parse_datafile(tanf_section3_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", 
"total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED + assert TANF_T6.objects.all().count() == 3 parser_errors = ParserError.objects.filter(file=tanf_section3_file) @@ -1004,13 +1005,26 @@ def tanf_section4_file(stt_user, stt): @pytest.mark.django_db() -def test_parse_tanf_section4_file(tanf_section4_file): +def test_parse_tanf_section4_file(tanf_section4_file, dfs): """Test parsing TANF Section 4 submission.""" tanf_section4_file.year = 2021 tanf_section4_file.quarter = 'Q1' + dfs.datafile = tanf_section4_file + parse.parse_datafile(tanf_section4_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", "total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED + assert TANF_T7.objects.all().count() == 18 parser_errors = ParserError.objects.filter(file=tanf_section4_file) @@ -1036,15 +1050,25 @@ def ssp_section4_file(stt_user, stt): return util.create_test_datafile('ADS.E2J.NDM4.MS24', stt_user, stt, "SSP Stratum Data") @pytest.mark.django_db() -def test_parse_ssp_section4_file(ssp_section4_file): +def test_parse_ssp_section4_file(ssp_section4_file, dfs): """Test parsing SSP Section 4 submission.""" ssp_section4_file.year = 2019 ssp_section4_file.quarter = 'Q1' + dfs.datafile = ssp_section4_file parse.parse_datafile(ssp_section4_file) m7_objs = SSP_M7.objects.all().order_by('FAMILIES_MONTH') + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", "total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + assert m7_objs.count() == 12 first = m7_objs.first() @@ -1057,13 +1081,30 @@ def 
ssp_section2_file(stt_user, stt): return util.create_test_datafile('ADS.E2J.NDM2.MS24', stt_user, stt, 'SSP Closed Case Data') @pytest.mark.django_db() -def test_parse_ssp_section2_file(ssp_section2_file): +def test_parse_ssp_section2_file(ssp_section2_file, dfs): """Test parsing SSP Section 2 submission.""" ssp_section2_file.year = 2019 ssp_section2_file.quarter = 'Q1' + dfs.datafile = ssp_section2_file + parse.parse_datafile(ssp_section2_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.case_aggregates_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {'rejected': 0, + 'months': [ + {'accepted_without_errors': 0, + 'accepted_with_errors': 78, 'month': 'Oct'}, + {'accepted_without_errors': 0, + 'accepted_with_errors': 78, 'month': 'Nov'}, + {'accepted_without_errors': 0, + 'accepted_with_errors': 75, 'month': 'Dec'} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED_WITH_ERRORS + m4_objs = SSP_M4.objects.all().order_by('id') m5_objs = SSP_M5.objects.all().order_by('AMOUNT_EARNED_INCOME') @@ -1102,12 +1143,25 @@ def ssp_section3_file(stt_user, stt): return util.create_test_datafile('ADS.E2J.NDM3.MS24', stt_user, stt, "SSP Aggregate Data") @pytest.mark.django_db() -def test_parse_ssp_section3_file(ssp_section3_file): +def test_parse_ssp_section3_file(ssp_section3_file, dfs): """Test parsing TANF Section 3 submission.""" ssp_section3_file.year = 2019 ssp_section3_file.quarter = 'Q1' + dfs.datafile = ssp_section3_file + parse.parse_datafile(ssp_section3_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", "total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED + m6_objs = SSP_M6.objects.all().order_by('RPT_MONTH_YEAR') assert m6_objs.count() == 3 @@ -1173,7 +1227,6 @@ 
def test_parse_tribal_section_1_file(tribal_section_1_file, dfs): tribal_section_1_file.save() dfs.datafile = tribal_section_1_file - dfs.save() parse.parse_datafile(tribal_section_1_file) @@ -1227,12 +1280,29 @@ def tribal_section_2_file(stt_user, stt): return util.create_test_datafile('ADS.E2J.FTP2.TS142.txt', stt_user, stt, "Tribal Closed Case Data") @pytest.mark.django_db() -def test_parse_tribal_section_2_file(tribal_section_2_file): +def test_parse_tribal_section_2_file(tribal_section_2_file, dfs): """Test parsing Tribal TANF Section 2 submission.""" tribal_section_2_file.year = 2020 tribal_section_2_file.quarter = 'Q1' + dfs.datafile = tribal_section_2_file + parse.parse_datafile(tribal_section_2_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.case_aggregates_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {'rejected': 0, + 'months': [ + {'accepted_without_errors': 0, + 'accepted_with_errors': 3, 'month': 'Oct'}, + {'accepted_without_errors': 0, + 'accepted_with_errors': 3, 'month': 'Nov'}, + {'accepted_without_errors': 0, + 'accepted_with_errors': 0, 'month': 'Dec'} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED_WITH_ERRORS + assert Tribal_TANF_T4.objects.all().count() == 6 assert Tribal_TANF_T5.objects.all().count() == 13 @@ -1251,13 +1321,26 @@ def tribal_section_3_file(stt_user, stt): return util.create_test_datafile('ADS.E2J.FTP3.TS142', stt_user, stt, "Tribal Aggregate Data") @pytest.mark.django_db() -def test_parse_tribal_section_3_file(tribal_section_3_file): +def test_parse_tribal_section_3_file(tribal_section_3_file, dfs): """Test parsing Tribal TANF Section 3 submission.""" tribal_section_3_file.year = 2020 tribal_section_3_file.quarter = 'Q1' + dfs.datafile = tribal_section_3_file + parse.parse_datafile(tribal_section_3_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == 
{"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", "total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED + + assert Tribal_TANF_T6.objects.all().count() == 3 t6_objs = Tribal_TANF_T6.objects.all().order_by("NUM_APPLICATIONS") @@ -1274,12 +1357,23 @@ def tribal_section_4_file(stt_user, stt): return util.create_test_datafile('tribal_section_4_fake.txt', stt_user, stt, "Tribal Stratum Data") @pytest.mark.django_db() -def test_parse_tribal_section_4_file(tribal_section_4_file): +def test_parse_tribal_section_4_file(tribal_section_4_file, dfs): """Test parsing Tribal TANF Section 4 submission.""" tribal_section_4_file.year = 2020 tribal_section_4_file.quarter = 'Q1' + dfs.datafile = tribal_section_4_file + parse.parse_datafile(tribal_section_4_file) + dfs.status = dfs.get_status() + dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 0}, + {"month": "Nov", "total_errors": 0}, + {"month": "Dec", "total_errors": 0} + ]} + assert Tribal_TANF_T7.objects.all().count() == 18 t7_objs = Tribal_TANF_T7.objects.all().order_by('FAMILIES_MONTH') @@ -1330,3 +1424,46 @@ def test_bulk_create_returns_rollback_response_on_bulk_index_exception(test_data assert TANF_T1.objects.all().count() == 1 assert TANF_T2.objects.all().count() == 0 assert TANF_T3.objects.all().count() == 0 + + +@pytest.fixture +def tanf_section_4_file_with_errors(stt_user, stt): + """Fixture for tanf_section4_with_errors.""" + return util.create_test_datafile('tanf_section4_with_errors.txt', stt_user, stt, "Stratum Data") + +@pytest.mark.django_db() +def test_parse_tanf_section4_file_with_errors(tanf_section_4_file_with_errors, dfs): + """Test parsing TANF Section 4 submission with errors.""" + dfs.datafile = tanf_section_4_file_with_errors + + parse.parse_datafile(tanf_section_4_file_with_errors) + + dfs.status = dfs.get_status()
+ dfs.case_aggregates = aggregates.total_errors_by_month( + dfs.datafile, dfs.status) + assert dfs.case_aggregates == {"months": [ + {"month": "Oct", "total_errors": 2}, + {"month": "Nov", "total_errors": 2}, + {"month": "Dec", "total_errors": 2} + ]} + + assert dfs.get_status() == DataFileSummary.Status.ACCEPTED_WITH_ERRORS + + assert TANF_T7.objects.all().count() == 18 + + parser_errors = ParserError.objects.filter(file=tanf_section_4_file_with_errors) + assert parser_errors.count() == 6 + + t7_objs = TANF_T7.objects.all().order_by('FAMILIES_MONTH') + + first = t7_objs.first() + sixth = t7_objs[5] + + assert first.RPT_MONTH_YEAR == 202011 + assert sixth.RPT_MONTH_YEAR == 202010 + + assert first.TDRS_SECTION_IND == '1' + assert sixth.TDRS_SECTION_IND == '1' + + assert first.FAMILIES_MONTH == 0 + assert sixth.FAMILIES_MONTH == 446 diff --git a/tdrs-backend/tdpservice/scheduling/parser_task.py b/tdrs-backend/tdpservice/scheduling/parser_task.py index a7462f0e3..b40abb206 100644 --- a/tdrs-backend/tdpservice/scheduling/parser_task.py +++ b/tdrs-backend/tdpservice/scheduling/parser_task.py @@ -5,7 +5,7 @@ from tdpservice.data_files.models import DataFile from tdpservice.parsers.parse import parse_datafile from tdpservice.parsers.models import DataFileSummary -from tdpservice.parsers.aggregates import case_aggregates_by_month +from tdpservice.parsers.aggregates import case_aggregates_by_month, total_errors_by_month logger = logging.getLogger(__name__) @@ -27,6 +27,8 @@ def parse(data_file_id): if "Case Data" in data_file.section: dfs.case_aggregates = case_aggregates_by_month(data_file, dfs.status) + else: + dfs.case_aggregates = total_errors_by_month(data_file, dfs.status) dfs.save() diff --git a/tdrs-frontend/src/components/SubmissionHistory/CaseAggregatesTable.jsx b/tdrs-frontend/src/components/SubmissionHistory/CaseAggregatesTable.jsx new file mode 100644 index 000000000..9800206c6 --- /dev/null +++ 
b/tdrs-frontend/src/components/SubmissionHistory/CaseAggregatesTable.jsx @@ -0,0 +1,134 @@ +import React from 'react' +import { useDispatch } from 'react-redux' +import { + SubmissionSummaryStatusIcon, + formatDate, + downloadFile, + downloadErrorReport, +} from './helpers' + +const MonthSubRow = ({ data }) => + data ? ( + <> +
- Submitted On - | -- Submitted By - | -- File Name - | -- Status - | -- Error Reports (In development) - | -{data.month} | +{data.total_errors} | + > + ) : ( + <> +- | +N/A | + > + ) + +const TotalAggregatesRow = ({ file }) => { + const dispatch = useDispatch() + const errorFileName = `${file.year}-${file.quarter}-${file.section}` + + return ( + <> +
---|---|---|---|---|---|---|
+ {formatDate(file.createdAt)} + | + ++ {file.submittedBy} + | + ++ + | + +
+
+ |
+
+ + {file.summary && + file.summary.status && + file.summary.status !== 'Pending' ? ( + file.hasError > 0 ? ( + + ) : ( + 'No Errors' + ) + ) : ( + 'Pending' + )} + | +||
+ Submitted On + | ++ Submitted By + | ++ File Name + | ++ Month + | ++ Total Errors + | ++ Status + | ++ Error Reports (In development) + | +