diff --git a/tdrs-backend/tdpservice/data_files/serializers.py b/tdrs-backend/tdpservice/data_files/serializers.py
index 3e34bd421..4977f2ebd 100644
--- a/tdrs-backend/tdpservice/data_files/serializers.py
+++ b/tdrs-backend/tdpservice/data_files/serializers.py
@@ -12,6 +12,7 @@
from tdpservice.security.models import ClamAVFileScan
from tdpservice.stts.models import STT
from tdpservice.users.models import User
+from tdpservice.parsers.serializers import DataFileSummarySerializer
logger = logging.getLogger(__name__)
class DataFileSerializer(serializers.ModelSerializer):
@@ -22,6 +23,7 @@ class DataFileSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(queryset=User.objects.all())
ssp = serializers.BooleanField(write_only=True)
has_error = serializers.SerializerMethodField()
+ summary = DataFileSummarySerializer(many=False, read_only=True)
class Meta:
"""Metadata."""
@@ -45,6 +47,7 @@ class Meta:
's3_location',
's3_versioning_id',
'has_error',
+ 'summary'
]
read_only_fields = ("version",)
diff --git a/tdrs-backend/tdpservice/data_files/validators.py b/tdrs-backend/tdpservice/data_files/validators.py
index 2f78231cb..a4d0f0bc8 100644
--- a/tdrs-backend/tdpservice/data_files/validators.py
+++ b/tdrs-backend/tdpservice/data_files/validators.py
@@ -22,7 +22,7 @@
def _get_unsupported_msg(_type, value, supported_options):
"""Construct a message to convey an unsupported operation."""
return (
- f'Unsupported {_type}: {value}, supported {pluralize(_type)} '
+ f'Unsupported {_type}: supported {pluralize(_type)} '
f'are: {supported_options}'
)
diff --git a/tdrs-backend/tdpservice/parsers/migrations/0002_alter_parsererror_error_type.py b/tdrs-backend/tdpservice/parsers/migrations/0002_alter_parsererror_error_type.py
index e55c856ce..5236b5c29 100644
--- a/tdrs-backend/tdpservice/parsers/migrations/0002_alter_parsererror_error_type.py
+++ b/tdrs-backend/tdpservice/parsers/migrations/0002_alter_parsererror_error_type.py
@@ -14,5 +14,5 @@ class Migration(migrations.Migration):
model_name='parsererror',
name='error_type',
field=models.TextField(choices=[('1', 'File pre-check'), ('2', 'Record value invalid'), ('3', 'Record value consistency'), ('4', 'Case consistency'), ('5', 'Section consistency'), ('6', 'Historical consistency')], max_length=128),
- )
+ ),
]
diff --git a/tdrs-backend/tdpservice/parsers/migrations/0008_alter_datafilesummary_datafile.py b/tdrs-backend/tdpservice/parsers/migrations/0008_alter_datafilesummary_datafile.py
new file mode 100644
index 000000000..dc9f0b2f6
--- /dev/null
+++ b/tdrs-backend/tdpservice/parsers/migrations/0008_alter_datafilesummary_datafile.py
@@ -0,0 +1,20 @@
+# Generated by Django 3.2.15 on 2023-07-20 20:50
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('data_files', '0012_datafile_s3_versioning_id'),
+ ('parsers', '0007_datafilesummary'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='datafilesummary',
+ name='datafile',
+ field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='summary', to='data_files.datafile'),
+ ),
+ ]
diff --git a/tdrs-backend/tdpservice/parsers/migrations/0009_alter_datafilesummary_status.py b/tdrs-backend/tdpservice/parsers/migrations/0009_alter_datafilesummary_status.py
new file mode 100644
index 000000000..dd05c0f4f
--- /dev/null
+++ b/tdrs-backend/tdpservice/parsers/migrations/0009_alter_datafilesummary_status.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.15 on 2023-08-23 12:43
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('parsers', '0008_alter_datafilesummary_datafile'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='datafilesummary',
+ name='status',
+ field=models.CharField(choices=[('Pending', 'Pending'), ('Accepted', 'Accepted'), ('Accepted with Errors', 'Accepted With Errors'), ('Partially Accepted with Errors', 'Partially Accepted'), ('Rejected', 'Rejected')], default='Pending', max_length=50),
+ ),
+ ]
diff --git a/tdrs-backend/tdpservice/parsers/models.py b/tdrs-backend/tdpservice/parsers/models.py
index 0c0ccdc50..12c70a2c0 100644
--- a/tdrs-backend/tdpservice/parsers/models.py
+++ b/tdrs-backend/tdpservice/parsers/models.py
@@ -77,6 +77,7 @@ class Status(models.TextChoices):
PENDING = "Pending" # file has been uploaded, but not validated
ACCEPTED = "Accepted"
ACCEPTED_WITH_ERRORS = "Accepted with Errors"
+ PARTIALLY_ACCEPTED = "Partially Accepted with Errors"
REJECTED = "Rejected"
status = models.CharField(
@@ -85,7 +86,7 @@ class Status(models.TextChoices):
default=Status.PENDING,
)
- datafile = models.ForeignKey(DataFile, on_delete=models.CASCADE)
+ datafile = models.OneToOneField(DataFile, on_delete=models.CASCADE, related_name="summary")
case_aggregates = models.JSONField(null=True, blank=False)
@@ -100,11 +101,17 @@ def get_status(self):
.exclude(error_message__icontains="trailer")\
.exclude(error_message__icontains="Unknown Record_Type was found.")
+ row_precheck_errors = errors.filter(error_type=ParserErrorCategoryChoices.PRE_CHECK)\
+ .filter(field_name="Record_Type")\
+ .exclude(error_message__icontains="trailer")
+
if errors is None:
return DataFileSummary.Status.PENDING
elif errors.count() == 0:
return DataFileSummary.Status.ACCEPTED
elif precheck_errors.count() > 0:
return DataFileSummary.Status.REJECTED
+ elif row_precheck_errors.count() > 0:
+ return DataFileSummary.Status.PARTIALLY_ACCEPTED
else:
return DataFileSummary.Status.ACCEPTED_WITH_ERRORS
diff --git a/tdrs-backend/tdpservice/parsers/parse.py b/tdrs-backend/tdpservice/parsers/parse.py
index 409d239b8..05c3da3ca 100644
--- a/tdrs-backend/tdpservice/parsers/parse.py
+++ b/tdrs-backend/tdpservice/parsers/parse.py
@@ -20,7 +20,7 @@ def parse_datafile(datafile):
header_line = rawfile.readline().decode().strip()
header, header_is_valid, header_errors = schema_defs.header.parse_and_validate(
header_line,
- util.make_generate_parser_error(datafile, 1)
+ util.make_generate_file_precheck_parser_error(datafile, 1)
)
if not header_is_valid:
logger.info(f"Preparser Error: {len(header_errors)} header errors encountered.")
diff --git a/tdrs-backend/tdpservice/parsers/test/data/ADS.E2J.NDM1.TS53_fake.rollback b/tdrs-backend/tdpservice/parsers/test/data/ADS.E2J.NDM1.TS53_fake.rollback.txt
similarity index 100%
rename from tdrs-backend/tdpservice/parsers/test/data/ADS.E2J.NDM1.TS53_fake.rollback
rename to tdrs-backend/tdpservice/parsers/test/data/ADS.E2J.NDM1.TS53_fake.rollback.txt
diff --git a/tdrs-backend/tdpservice/parsers/test/data/small_bad_ssp_s1 b/tdrs-backend/tdpservice/parsers/test/data/small_bad_ssp_s1.txt
similarity index 100%
rename from tdrs-backend/tdpservice/parsers/test/data/small_bad_ssp_s1
rename to tdrs-backend/tdpservice/parsers/test/data/small_bad_ssp_s1.txt
diff --git a/tdrs-backend/tdpservice/parsers/test/data/small_bad_tanf_s1 b/tdrs-backend/tdpservice/parsers/test/data/small_bad_tanf_s1.txt
similarity index 100%
rename from tdrs-backend/tdpservice/parsers/test/data/small_bad_tanf_s1
rename to tdrs-backend/tdpservice/parsers/test/data/small_bad_tanf_s1.txt
diff --git a/tdrs-backend/tdpservice/parsers/test/data/small_correct_file b/tdrs-backend/tdpservice/parsers/test/data/small_correct_file.txt
similarity index 100%
rename from tdrs-backend/tdpservice/parsers/test/data/small_correct_file
rename to tdrs-backend/tdpservice/parsers/test/data/small_correct_file.txt
diff --git a/tdrs-backend/tdpservice/parsers/test/test_parse.py b/tdrs-backend/tdpservice/parsers/test/test_parse.py
index 9c785f79f..5a940a71b 100644
--- a/tdrs-backend/tdpservice/parsers/test/test_parse.py
+++ b/tdrs-backend/tdpservice/parsers/test/test_parse.py
@@ -19,7 +19,7 @@
@pytest.fixture
def test_datafile(stt_user, stt):
"""Fixture for small_correct_file."""
- return util.create_test_datafile('small_correct_file', stt_user, stt)
+ return util.create_test_datafile('small_correct_file.txt', stt_user, stt)
@pytest.fixture
def dfs():
@@ -43,7 +43,6 @@ def test_parse_small_correct_file(test_datafile, dfs):
]}
assert dfs.get_status() == DataFileSummary.Status.ACCEPTED
-
assert TANF_T1.objects.count() == 1
# spot check
@@ -597,7 +596,7 @@ def test_parse_super_big_s1_file_with_rollback(super_big_s1_rollback_file):
@pytest.fixture
def bad_tanf_s1__row_missing_required_field(stt_user, stt):
"""Fixture for small_tanf_section1."""
- return util.create_test_datafile('small_bad_tanf_s1', stt_user, stt)
+ return util.create_test_datafile('small_bad_tanf_s1.txt', stt_user, stt)
@pytest.mark.django_db
@@ -608,7 +607,7 @@ def test_parse_bad_tfs1_missing_required(bad_tanf_s1__row_missing_required_field
parse.parse_datafile(bad_tanf_s1__row_missing_required_field)
- assert dfs.get_status() == DataFileSummary.Status.ACCEPTED_WITH_ERRORS
+ assert dfs.get_status() == DataFileSummary.Status.PARTIALLY_ACCEPTED
parser_errors = ParserError.objects.filter(file=bad_tanf_s1__row_missing_required_field)
assert parser_errors.count() == 4
@@ -644,7 +643,7 @@ def test_parse_bad_tfs1_missing_required(bad_tanf_s1__row_missing_required_field
@pytest.fixture
def bad_ssp_s1__row_missing_required_field(stt_user, stt):
"""Fixture for ssp_section1_datafile."""
- return util.create_test_datafile('small_bad_ssp_s1', stt_user, stt, 'SSP Active Case Data')
+ return util.create_test_datafile('small_bad_ssp_s1.txt', stt_user, stt, 'SSP Active Case Data')
@pytest.mark.django_db()
diff --git a/tdrs-backend/tdpservice/parsers/util.py b/tdrs-backend/tdpservice/parsers/util.py
index 073b7b8d8..6ed48e44e 100644
--- a/tdrs-backend/tdpservice/parsers/util.py
+++ b/tdrs-backend/tdpservice/parsers/util.py
@@ -35,7 +35,7 @@ def generate_parser_error(datafile, line_number, schema, error_category, error_m
row_number=line_number,
column_number=getattr(field, 'item', None),
item_number=getattr(field, 'item', None),
- field_name=getattr(field, 'name', None),
+ field_name=getattr(field, 'name', None) if hasattr(field, 'name') else field,
rpt_month_year=getattr(record, 'RPT_MONTH_YEAR', None),
case_number=getattr(record, 'CASE_NUMBER', None),
error_message=error_message,
@@ -64,6 +64,22 @@ def generate(schema, error_category, error_message, record=None, field=None):
return generate
+def make_generate_file_precheck_parser_error(datafile, line_number):
+ """Configure a generate_parser_error that acts as a file pre-check error."""
+ def generate(schema, error_category, error_message, record=None, field=None):
+ return generate_parser_error(
+ datafile=datafile,
+ line_number=line_number,
+ schema=schema,
+ error_category=error_category,
+ error_message=error_message,
+ record=record,
+ field=None, # purposely overridden to force a "Rejected" status for certain file precheck errors
+ )
+
+ return generate
+
+
class SchemaManager:
"""Manages one or more RowSchema's and runs all parsers and validators."""
@@ -260,7 +276,6 @@ def transform_to_months(quarter):
case _:
raise ValueError("Invalid quarter value.")
-
def month_to_int(month):
"""Return the integer value of a month."""
return datetime.strptime(month, '%b').strftime('%m')
diff --git a/tdrs-backend/tdpservice/scheduling/parser_task.py b/tdrs-backend/tdpservice/scheduling/parser_task.py
index f9fab7f6f..be47703c5 100644
--- a/tdrs-backend/tdpservice/scheduling/parser_task.py
+++ b/tdrs-backend/tdpservice/scheduling/parser_task.py
@@ -19,7 +19,8 @@ def parse(data_file_id):
# for undetermined amount of time.
data_file = DataFile.objects.get(id=data_file_id)
- logger.info(f"DataFile parsing started for file -> {repr(data_file)}")
+ logger.info(f"DataFile parsing started for file {data_file.filename}")
+
dfs = DataFileSummary.objects.create(datafile=data_file, status=DataFileSummary.Status.PENDING)
errors = parse_datafile(data_file)
dfs.status = dfs.get_status()
diff --git a/tdrs-backend/tdpservice/search_indexes/test/test_model_mapping.py b/tdrs-backend/tdpservice/search_indexes/test/test_model_mapping.py
index e8e9eb81f..4d81eaac9 100644
--- a/tdrs-backend/tdpservice/search_indexes/test/test_model_mapping.py
+++ b/tdrs-backend/tdpservice/search_indexes/test/test_model_mapping.py
@@ -12,8 +12,8 @@
@pytest.fixture
def test_datafile(stt_user, stt):
- """Fixture for small_correct_file."""
- return create_test_datafile('small_correct_file', stt_user, stt)
+ """Fixture for small_correct_file.txt."""
+ return create_test_datafile('small_correct_file.txt', stt_user, stt)
@pytest.mark.django_db
diff --git a/tdrs-frontend/cypress/e2e/integration/file_upload.js b/tdrs-frontend/cypress/e2e/integration/file_upload.js
index a92b26d52..35cd400ab 100644
--- a/tdrs-frontend/cypress/e2e/integration/file_upload.js
+++ b/tdrs-frontend/cypress/e2e/integration/file_upload.js
@@ -5,7 +5,7 @@ Then('{string} can see Data Files page', (username) => {
cy.visit('/data-files')
cy.contains('Data Files').should('exist')
})
Then('{string} can see search form', (username) => {
cy.contains('Fiscal Year').should('exist')
cy.contains('Quarter').should('exist')
@@ -19,7 +19,7 @@ Then('{string} can browse upload file form', (username) => {
When('{string} uploads a file', (username) => {
cy.get('button').contains('Search').should('exist').click()
- cy.get('#closed-case-data').selectFile('../tdrs-backend/tdpservice/parsers/test/data/small_correct_file',{ action: 'drag-drop' })
+ cy.get('#closed-case-data').selectFile('../tdrs-backend/tdpservice/parsers/test/data/small_correct_file.txt',{ action: 'drag-drop' })
cy.get('button').contains('Submit Data Files').should('exist').click()
})
diff --git a/tdrs-frontend/src/actions/reports.js b/tdrs-frontend/src/actions/reports.js
index fa5e4a480..8ecb8839e 100644
--- a/tdrs-frontend/src/actions/reports.js
+++ b/tdrs-frontend/src/actions/reports.js
@@ -11,6 +11,7 @@ export const SET_FILE = 'SET_FILE'
export const CLEAR_FILE = 'CLEAR_FILE'
export const CLEAR_FILE_LIST = 'CLEAR_FILE_LIST'
export const SET_FILE_ERROR = 'SET_FILE_ERROR'
+export const FILE_EXT_ERROR = 'FILE_EXT_ERROR'
export const SET_FILE_SUBMITTED = 'SET_FILE_SUBMITTED'
export const CLEAR_ERROR = 'CLEAR_ERROR'
@@ -254,7 +255,11 @@ export const submit =
setLocalAlertState({
active: true,
type: 'error',
- message: error.message,
+ message: ''.concat(
+ error.message,
+ ': ',
+            error.response?.data?.file?.[0]
+ ),
})
)
}
diff --git a/tdrs-frontend/src/actions/reports.test.js b/tdrs-frontend/src/actions/reports.test.js
index 54bde8d6c..40593f3bb 100644
--- a/tdrs-frontend/src/actions/reports.test.js
+++ b/tdrs-frontend/src/actions/reports.test.js
@@ -201,7 +201,7 @@ describe('actions/reports', () => {
expect(axios.post).toHaveBeenCalledTimes(1)
expect(setLocalAlertState).toHaveBeenCalledWith({
active: true,
- message: undefined,
+ message: 'undefined: undefined',
type: 'error',
})
})
diff --git a/tdrs-frontend/src/actions/requestAccess.js b/tdrs-frontend/src/actions/requestAccess.js
index 668420c65..50c1f0ce5 100644
--- a/tdrs-frontend/src/actions/requestAccess.js
+++ b/tdrs-frontend/src/actions/requestAccess.js
@@ -1,6 +1,5 @@
import { SET_AUTH } from './auth'
import axios from 'axios'
-import axiosInstance from '../axios-instance'
import { logErrorToServer } from '../utils/eventLogger'
export const PATCH_REQUEST_ACCESS = 'PATCH_REQUEST_ACCESS'
diff --git a/tdrs-frontend/src/assets/Reports.scss b/tdrs-frontend/src/assets/Reports.scss
index f515530a6..58b89ce6c 100644
--- a/tdrs-frontend/src/assets/Reports.scss
+++ b/tdrs-frontend/src/assets/Reports.scss
@@ -35,6 +35,7 @@
border: none;
text-decoration: underline;
color: #264A64;
+ text-align: left;
}
.section-download:hover {
diff --git a/tdrs-frontend/src/components/FileUpload/FileUpload.jsx b/tdrs-frontend/src/components/FileUpload/FileUpload.jsx
index 0deea1216..d8a21476b 100644
--- a/tdrs-frontend/src/components/FileUpload/FileUpload.jsx
+++ b/tdrs-frontend/src/components/FileUpload/FileUpload.jsx
@@ -7,6 +7,7 @@ import {
clearError,
clearFile,
SET_FILE_ERROR,
+ FILE_EXT_ERROR,
upload,
download,
} from '../../actions/reports'
@@ -17,6 +18,9 @@ import { handlePreview, getTargetClassName } from './utils'
const INVALID_FILE_ERROR =
'We can’t process that file format. Please provide a plain text file.'
+const INVALID_EXT_ERROR =
+ 'Invalid extension. Accepted file types are: .txt, .ms##, .ts##, or .ts###.'
+
function FileUpload({ section, setLocalAlertState }) {
// e.g. 'Aggregate Case Data' => 'aggregate-case-data'
// The set of uploaded files in our Redux state
@@ -31,6 +35,10 @@ function FileUpload({ section, setLocalAlertState }) {
(file) => file.section.includes(sectionName) && file.uuid
)
+ const hasPreview = files?.some(
+ (file) => file.section.includes(sectionName) && file.name
+ )
+
const selectedFile = files?.find((file) => file.section.includes(sectionName))
const formattedSectionName = selectedFile?.section
@@ -54,8 +62,10 @@ function FileUpload({ section, setLocalAlertState }) {
setTimeout(trySettingPreview, 100)
}
}
- if (hasFile) trySettingPreview()
- }, [hasFile, fileName, targetClassName])
+ if (hasPreview || hasFile) {
+ trySettingPreview()
+ }
+ }, [hasPreview, hasFile, fileName, targetClassName])
const downloadFile = ({ target }) => {
dispatch(clearError({ section: sectionName }))
@@ -89,6 +99,19 @@ function FileUpload({ section, setLocalAlertState }) {
filereader.onloadend = (evt) => {
/* istanbul ignore next */
if (!evt.target.error) {
+ // Validate file extension before proceeding
+ const re = /(\.txt|\.ms\d{2}|\.ts\d{2,3})$/i
+ if (!re.exec(file.name)) {
+ dispatch({
+ type: FILE_EXT_ERROR,
+ payload: {
+ error: { message: INVALID_EXT_ERROR },
+ section,
+ },
+ })
+ return
+ }
+
// Read in the file blob "headers: and create a hex string signature
const uint = new Uint8Array(evt.target.result)
const bytes = []
diff --git a/tdrs-frontend/src/components/Paginator/Paginator.test.js b/tdrs-frontend/src/components/Paginator/Paginator.test.js
index 5eb868856..06390986d 100644
--- a/tdrs-frontend/src/components/Paginator/Paginator.test.js
+++ b/tdrs-frontend/src/components/Paginator/Paginator.test.js
@@ -1,5 +1,5 @@
import React from 'react'
-import { render, fireEvent, waitFor, screen } from '@testing-library/react'
+import { render, fireEvent, screen } from '@testing-library/react'
import Paginator from './Paginator'
describe('Paginator', () => {
diff --git a/tdrs-frontend/src/components/SiteMap/SiteMap.test.js b/tdrs-frontend/src/components/SiteMap/SiteMap.test.js
index 2b435064d..236c653fe 100644
--- a/tdrs-frontend/src/components/SiteMap/SiteMap.test.js
+++ b/tdrs-frontend/src/components/SiteMap/SiteMap.test.js
@@ -1,7 +1,6 @@
import React from 'react'
import { render } from '@testing-library/react'
import SiteMap from './SiteMap'
-import { mount } from 'enzyme'
import thunk from 'redux-thunk'
import { Provider } from 'react-redux'
import configureStore from 'redux-mock-store'
diff --git a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx
index 844b0c744..55df49364 100644
--- a/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx
+++ b/tdrs-frontend/src/components/SubmissionHistory/SubmissionHistory.jsx
@@ -3,6 +3,13 @@ import axios from 'axios'
import PropTypes from 'prop-types'
import { useDispatch, useSelector } from 'react-redux'
import { fileUploadSections } from '../../reducers/reports'
+import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
+import {
+ faCheckCircle,
+ faExclamationCircle,
+ faXmarkCircle,
+ faClock,
+} from '@fortawesome/free-solid-svg-icons'
import Paginator from '../Paginator'
import { getAvailableFileList, download } from '../../actions/reports'
import { useEffect } from 'react'
@@ -11,6 +18,96 @@ import { getParseErrors } from '../../actions/createXLSReport'
const formatDate = (dateStr) => new Date(dateStr).toLocaleString()
+const SubmissionSummaryStatusIcon = ({ status }) => {
+ let icon = null
+ let color = null
+
+ switch (status) {
+ case 'Pending':
+ icon = faClock
+ color = '#005EA2'
+ break
+ case 'Accepted':
+ icon = faCheckCircle
+ color = '#40bb45'
+ break
+ case 'Partially Accepted with Errors':
+ icon = faExclamationCircle
+ color = '#ec4e11'
+ break
+ case 'Accepted with Errors':
+ icon = faExclamationCircle
+ color = '#ec4e11'
+ break
+ case 'Rejected':
+ icon = faXmarkCircle
+ color = '#bb0000'
+ break
+ default:
+ break
+ }
+ return (
+
Submitted On | -Submitted By | -File Name | -Error Reports (In development) | ++ Submitted On + | ++ Submitted By + | ++ File Name + | ++ Status + | ++ Error Reports (In development) + |
---|