Merge branch 'develop' into 2883-kibana-dashboard
elipe17 authored Jul 26, 2024
2 parents 5cac334 + 3342836 commit 25717db
Showing 8 changed files with 75 additions and 10 deletions.
2 changes: 1 addition & 1 deletion tdrs-backend/docker-compose.local.yml
@@ -3,7 +3,7 @@ version: "3.4"
 
 services:
   postgres:
-    image: postgres:11.6
+    image: postgres:15.7
     environment:
       - PGDATA=/var/lib/postgresql/data/
       - POSTGRES_DB=tdrs_test

6 changes: 5 additions & 1 deletion tdrs-backend/tdpservice/parsers/schema_defs/header.py
@@ -122,7 +122,11 @@
             startIndex=22,
             endIndex=23,
             required=True,
-            validators=[validators.matches("D")],
+            validators=[validators.matches("D",
+                        error_func=lambda eargs: ("HEADER Update Indicator must be set to D "
+                                                  f"instead of {eargs.value}. Please review "
+                                                  "Exporting Complete Data Using FTANF in the "
+                                                  "Knowledge Center."))],
         ),
     ],
 )

34 changes: 34 additions & 0 deletions tdrs-backend/tdpservice/parsers/test/test_header.py
@@ -37,3 +37,37 @@ def test_header_cleanup(test_datafile):
 
     assert header_is_valid
     assert header_errors == []
+
+@pytest.mark.parametrize("header_line, is_valid, error", [
+    # Title error
+    ("      20204A06   TAN1ED", False, "Your file does not begin with a HEADER record."),
+    # quarter error
+    ("HEADER20205A06   TAN1ED", False, "HEADER Item 5 (quarter): 5 is not in [1, 2, 3, 4]."),
+    # Type error
+    ("HEADER20204E06   TAN1ED", False, "HEADER Item 6 (type): E is not in [A, C, G, S]."),
+    # Fips error
+    ("HEADER20204A07   TAN1ED", False, ("HEADER Item 1 (state fips): 07 is not in [00, 01, 02, 04, 05, 06, 08, 09, "
+                                        "10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, "
+                                        "30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 49, "
+                                        "50, 51, 53, 54, 55, 56, 66, 72, 78].")),
+    # Tribe error
+    ("HEADER20204A00 -1TAN1ED", False, "HEADER Item 3 (tribe code): -1 is not in range [0, 999]."),
+    # Program type error
+    ("HEADER20204A06   BAD1ED", False, "HEADER Item 7 (program type): BAD is not in [TAN, SSP]."),
+    # Edit error
+    ("HEADER20204A06   TAN3ED", False, "HEADER Item 8 (edit): 3 is not in [1, 2]."),
+    # Encryption error
+    ("HEADER20204A06   TAN1AD", False, "HEADER Item 9 (encryption): A is not in [ , E]."),
+    # Update error
+    ("HEADER20204A06   TAN1EA", False, ("HEADER Update Indicator must be set to D instead of A. Please review "
+                                        "Exporting Complete Data Using FTANF in the Knowledge Center.")),
+])
+@pytest.mark.django_db
+def test_header_fields(test_datafile, header_line, is_valid, error):
+    """Test validate all header fields."""
+    generate_error = util.make_generate_parser_error(test_datafile, 1)
+    header, header_is_valid, header_errors = schema_defs.header.parse_and_validate(header_line,
+                                                                                    generate_error)
+
+    assert is_valid == header_is_valid
+    assert error == header_errors[0].error_message

5 changes: 4 additions & 1 deletion tdrs-backend/tdpservice/parsers/test/test_parse.py
@@ -1615,7 +1615,10 @@ def test_parse_tanf_section_1_file_with_bad_update_indicator(tanf_section_1_file
     error = parser_errors.first()
 
     assert error.error_type == ParserErrorCategoryChoices.FIELD_VALUE
-    assert error.error_message == "HEADER Item 10 (update indicator): U does not match D."
+    assert error.error_message == ("HEADER Update Indicator must be set to D "
+                                   "instead of U. Please review "
+                                   "Exporting Complete Data Using FTANF in the "
+                                   "Knowledge Center.")
 
 
 @pytest.mark.django_db()

27 changes: 25 additions & 2 deletions tdrs-backend/tdpservice/parsers/test/test_util.py
@@ -4,8 +4,13 @@
 from datetime import datetime
 from ..fields import Field
 from ..row_schema import RowSchema, SchemaManager
-from ..util import make_generate_parser_error, create_test_datafile, get_years_apart, clean_options_string
-
+from ..util import (
+    make_generate_parser_error,
+    create_test_datafile,
+    get_years_apart,
+    clean_options_string,
+    generate_t2_t3_t5_hashes)
+import logging
 
 def passing_validator():
     """Fake validator that always returns valid."""
@@ -553,3 +558,21 @@ def test_clean_options_string(options, expected):
     """Test `clean_options_string` util func."""
     result = clean_options_string(options)
     assert result == expected
+
+
+@pytest.mark.django_db()
+def test_empty_SSN_DOB_space_filled(caplog):
+    """Test empty_SSN_DOB_space_filled."""
+    line = 'fake_line'
+
+    class record:
+        CASE_NUMBER = 'fake_case_number'
+        SSN = None
+        DATE_OF_BIRTH = None
+        FAMILY_AFFILIATION = 'fake_family_affiliation'
+        RPT_MONTH_YEAR = '202310'
+        RecordType = 'T2'
+
+    with caplog.at_level(logging.ERROR):
+        generate_t2_t3_t5_hashes(line, record)
+        assert caplog.text == ''

7 changes: 4 additions & 3 deletions tdrs-backend/tdpservice/parsers/util.py
@@ -281,14 +281,15 @@ def remove_case_due_to_errors(self, should_remove, case_hash):
 def generate_t1_t4_hashes(line, record):
     """Return hashes for duplicate and partial duplicate detection for T1 & T4 records."""
     logger.debug(f"Partial Hash Field Values: {record.RecordType} {str(record.RPT_MONTH_YEAR)} {record.CASE_NUMBER}")
-    return hash(line), hash(record.RecordType + str(record.RPT_MONTH_YEAR) + record.CASE_NUMBER)
+    return hash(line), hash(record.RecordType + str(record.RPT_MONTH_YEAR or '') + str(record.CASE_NUMBER or ''))
 
 def generate_t2_t3_t5_hashes(line, record):
     """Return hashes for duplicate and partial duplicate detection for T2 & T3 & T5 records."""
     logger.debug(f"Partial Hash Field Values: {record.RecordType} {str(record.RPT_MONTH_YEAR)} {record.CASE_NUMBER} " +
                  f"{str(record.FAMILY_AFFILIATION)} {record.DATE_OF_BIRTH} {record.SSN}")
-    return hash(line), hash(record.RecordType + str(record.RPT_MONTH_YEAR) + record.CASE_NUMBER +
-                            str(record.FAMILY_AFFILIATION) + record.DATE_OF_BIRTH + record.SSN)
+    return hash(line), hash(record.RecordType + str(record.RPT_MONTH_YEAR or '') + str(record.CASE_NUMBER or '') +
+                            str(record.FAMILY_AFFILIATION or '') + str(record.DATE_OF_BIRTH or '') +
+                            str(record.SSN or ''))
 
 def get_t1_t4_partial_hash_members():
     """Return field names used to generate t1/t4 partial hashes."""

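A note on the `or ''` guards introduced above: concatenating a None-valued field into the partial-hash input raises a TypeError, while coercing missing values to empty strings lets the hash be computed (the new test_empty_SSN_DOB_space_filled test exercises this case). A minimal standalone sketch, using a hypothetical Record stand-in rather than the project's actual record models:

class Record:
    # Hypothetical stand-in for a parsed T2 record with several missing (None) fields.
    RecordType = 'T2'
    RPT_MONTH_YEAR = '202310'
    CASE_NUMBER = None
    FAMILY_AFFILIATION = 1
    DATE_OF_BIRTH = None
    SSN = None

record = Record()

# Old behavior: record.RecordType + str(record.RPT_MONTH_YEAR) + record.CASE_NUMBER
# raises TypeError: can only concatenate str (not "NoneType") to str.
# New behavior: None-valued fields collapse to '' and the partial hash is computed cleanly.
partial_hash = hash(record.RecordType + str(record.RPT_MONTH_YEAR or '') + str(record.CASE_NUMBER or '') +
                    str(record.FAMILY_AFFILIATION or '') + str(record.DATE_OF_BIRTH or '') +
                    str(record.SSN or ''))
print(partial_hash)
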
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/parsers/validators.py
@@ -317,7 +317,7 @@ def matches(option, error_func=None):
     """Validate that value is equal to option."""
     return make_validator(
         lambda value: value == option,
-        lambda eargs: error_func(option)
+        lambda eargs: error_func(eargs)
         if error_func
         else f"{format_error_context(eargs)} {eargs.value} does not match {option}.",
     )

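This one-character change is the counterpart to the header.py edit earlier in the commit: matches() now hands the full eargs object to error_func instead of only the expected option, so a custom message can reference the failing value through eargs.value. A minimal sketch of that contract, with simplified stand-ins for the project's make_validator and error-args types:

from collections import namedtuple

# Hypothetical stand-in for the parser's error-args object; only .value is assumed here.
EArgs = namedtuple("EArgs", ["value"])

def matches(option, error_func=None):
    # Return a validator that checks value == option and builds an error message on failure.
    def validator(value):
        eargs = EArgs(value=value)
        if value == option:
            return True, None
        message = (error_func(eargs) if error_func
                   else f"{eargs.value} does not match {option}.")
        return False, message
    return validator

check = matches("D", error_func=lambda eargs: ("HEADER Update Indicator must be set to D "
                                               f"instead of {eargs.value}."))
print(check("U"))  # (False, 'HEADER Update Indicator must be set to D instead of U.')
print(check("D"))  # (True, None)
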
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/search_indexes/admin/filters.py
@@ -51,7 +51,7 @@ def __init__(self, field, request, params, model, model_admin, field_path):
         self.lookup_choices = self._get_lookup_choices(request)
 
     def _get_lookup_choices(self, request):
-        """Filter queryset to guarentee lookup_choices only has STTs associated with the record type."""
+        """Filter queryset to guarantee lookup_choices only has STTs associated with the record type."""
         record_type = str(request.path).split('/')[-2]
         queryset = STT.objects.all()
         if 'tribal' in record_type:
