diff --git a/.circleci/build-and-test/workflows.yml b/.circleci/build-and-test/workflows.yml
index b822f1cdc..99d7c4fff 100644
--- a/.circleci/build-and-test/workflows.yml
+++ b/.circleci/build-and-test/workflows.yml
@@ -49,6 +49,12 @@
                 - /^release.*/
           requires:
             - secrets-check
+      - make_erd: # from ../util folder
+          filters:
+            branches:
+              only:
+                - develop
+                - master
 
   build-and-test-backend:
     when: << pipeline.parameters.build_and_test_backend >>
diff --git a/.circleci/util/jobs.yml b/.circleci/util/jobs.yml
index 3cd1bfe12..4752ad5a0 100644
--- a/.circleci/util/jobs.yml
+++ b/.circleci/util/jobs.yml
@@ -4,6 +4,7 @@
     steps:
       - checkout
       - docker-compose-check
+      - docker-login
       - run:
           name: Run graph_models
           command: |
@@ -13,5 +14,9 @@
             fi
             docker-compose run --rm web bash -c \
             "./manage.py graph_models -a -g -o tdp_erd.png"
+            if [[ ! -f tdp_erd.png ]]; then
+              echo "Entity Relationship Diagram not found."
+              exit 1
+            fi
       - store_artifacts:
           path: tdrs-backend/tdp_erd.png
diff --git a/Taskfile.yml b/Taskfile.yml
index 8f1731fe9..d5c0c8951 100644
--- a/Taskfile.yml
+++ b/Taskfile.yml
@@ -5,7 +5,7 @@ tasks:
   upload-kibana-objs:
     desc: Upload dashboards to Kibana server
     cmds:
-      - curl -X POST localhost:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form file=@tdrs-backend/tdpservice/search_indexes/kibana_saved_objs.ndjson
+      - 'curl -X POST localhost:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form file=@tdrs-backend/tdpservice/search_indexes/kibana_saved_objs.ndjson'
 
   create-network:
     desc: Create the external network
diff --git a/docs/Technical-Documentation/cloud-foundry-db-upgrade.md b/docs/Technical-Documentation/cloud-foundry-db-upgrade.md
deleted file mode 100644
index 466b562f6..000000000
--- a/docs/Technical-Documentation/cloud-foundry-db-upgrade.md
+++ /dev/null
@@ -1,120 +0,0 @@
-# Cloud Foundry, Cloud.gov AWS RDS Database Upgrade
-
-## Process
-
-If you are performing this process for the staging or production, you need to ensure you are performing the changes through the [HHS](https://github.com/HHS/TANF-app) repo and not the [Raft](https://github.com/raft-tech/TANF-app) repo.
-<br/>
-
-### 1. SSH into a backend app in your desired environment
-```bash
-cf ssh tdp-backend-<APP>
-```
-<br/>
-
-### 2. Create a backup of all the databases in the ENV's RDS instance
-Note: you can get the required field values from `VCAP_SERVICES`.
-```bash
-/home/vcap/deps/0/apt/usr/lib/postgresql/<CURRENT VERSION>/bin/pg_dump -h <HOST> -p <PORT> -d <DB_NAME> -U <DB_USER> -F c --no-acl --no-owner -f <FILE_NAME>.pg
-```
-<br/>
-
-### 3. Copy the backup(s) to your local machine
-Note: This assumes you ran the backup command above in the home directory of the app. As an added bonus for later steps, you should execute this command from somewhere within `tdrs-backend` directory! Make sure not to commit the files/directories that are copied to your local directory.
-```bash
-cf ssh tdp-backend--<APP> -c 'tar cfz - ~/app/*.pg' | tar xfz - -C .
-```
-<br/>
-
-### 4. Verify backup file size(s) match the backup size(s) in the app
-```bash
-ls -lh <PWD>/home/vcap/app
-```
-As an added verification step, you should consider restoring the backups into a local server and verifying the contents with `psql` or `pgAdmin`.
-<br/><br/>
-
-### 5. Update the `version` key in the `json_params` item in the `database` resource in the `main.tf` file in the environment(s) you're upgrading with the new database server version
-```yaml
-json_params      = "{\"version\": \"<NEW VERSION>\"}"
-```
-<br/>
-
-### 6. Update the `postgresql-client` version to the new version in `tdrs-backend/apt.yml`
-```yaml
-- postgresql-client-<NEW VERSION>
-```
-Note: if the underlying OS for CloudFoundry is no longer `cflinuxfs4` you may also need to update the repo we point to for the postgres client binaries.
-<br/><br/>
-
-### 7. Update the postgres container version in `tdrs-backend/docker-compose.yml`
-```yaml
-postgres:
-image: postgres:<NEW VERSION>
-```
-<br/>
-
-### 8. Update Terraform state to delete then re-create RDS instance
-Follow the instuctions in the `terraform/README.md` and proceed from there. Modify the `main.tf` file in the `terraform/<ENV>` directory to inform TF of the changes. To delete the existing RDS instance you can simply comment out the whole database `resource` in the file (even though you made changes in the steps above). TF will see that the resource is no longer there, delete it, and appropriately update it's state. Then you simply re-comment the database `resource` back in with the changes you made in previous steps. TF will create the new RDS instance with your new updates, and also update the state in S3.
-<br/><br/>
-
-### 9. Bind backend to the new RDS instance to get credentials
-```bash
-cf bind-service tdp-backend-<APP> tdp-db-<ENV>
-```
-Be sure to re-stage the app when prompted
-<br/><br/>
-
-### 10. Apply the backend manifest to begin the restore process
-If you copied the backups as mentioned in the note from step 3, the backups will be copied for you to the app instance in the command below. If not, you will need to use `scp` to copy the backups to the app instance after running the command below.
-```bash
-cf push tdp-backend-<APP> --no-route -f manifest.buildpack.yml -t 180 --strategy rolling
-```
-<br/>
-
-### 11. SSH into the app you just pushed
-```bash
-cf ssh tdp-backend-<APP>
-```
-<br/>
-
-### 12. Create the appropriate database(s) in the new RDS server
-Note: you can get the required field values from `VCAP_SERVICES`.
-```bash
-/home/vcap/deps/0/apt/usr/lib/postgresql/<NEW VERSION>/bin/createdb -U <DB_USER> -h <HOST> <DB_NAME>
-```
-<br/>
-
-### 13. Restore the backup(s) to the appropriate database(s)
-Note: you can get the required field values from `VCAP_SERVICES`.
-```bash
-/home/vcap/deps/0/apt/usr/lib/postgresql/<NEW VERSION>/bin/pg_restore -p <PORT> -h <HOST> -U <DB_USER> -d <DB_NAME> <FILE_NAME>.pg
-```
-During this step, you may see errors similar to the message below. Note `<DB_USER>` is imputed in the message to avoid leaking environment specific usernames/roles.
-```bash
-pg_restore: from TOC entry 215; 1259 17313 SEQUENCE users_user_user_permissions_id_seq <DB_USER>
-pg_restore: error: could not execute query: ERROR:  role "<DB_USER>" does not exist
-Command was: ALTER TABLE public.users_user_user_permissions_id_seq OWNER TO <DB_USER>;
-```
-and the result and total amount of these errors should be:
-```bash
-pg_restore: warning: errors ignored on restore: 68
-```
-If this is what you see, everything is OK. This happens because the `pg_dump` doesn't remove owner associations on sequences for some reason. But you will see in the blocks above that `pg_restore` correctly alters the sequence owner to the new database user.
-<br/><br/>
-
-### 14. Use `psql` to get into the database to check state
-Note: you can get the required field values from `VCAP_SERVICES`.
-```bash
-/home/vcap/deps/0/apt/usr/lib/postgresql/<NEW VERSION>/bin/psql <RDS URI>
-```
-<br/>
-
-### 15. Re-deploy or Re-stage the backend and frontend apps
-Pending your environment you can do this GitHub labels or you can re-stage the apps from Cloud.gov.
-<br/><br/>
-
-### 16. Access the re-deployed/re-staged apps and run a smoke test
-- Log in
-- Submit a few datafiles
-- Make sure new and existing submission histories populate correctly
-- Checkout the DACs data
-<br/>
diff --git a/docs/Technical-Documentation/nexus-repo.md b/docs/Technical-Documentation/nexus-repo.md
index 5e504a384..2cf5190be 100644
--- a/docs/Technical-Documentation/nexus-repo.md
+++ b/docs/Technical-Documentation/nexus-repo.md
@@ -123,7 +123,7 @@ Now you will no longer have to enter the password when logging in.
 ## Local Docker Login
 After logging into the `tanf-dev` space with the `cf` cli, execute the following commands to authenticate your local docker daemon
 ```
-export NEXUS_DOCKER_PASSWORD=`cf service-key tanf-keys nexus-dev | tail -n +2 | jq .credentials.password`
+export NEXUS_DOCKER_PASSWORD=`cf service-key tanf-keys nexus-dev | tail -n +2 | jq .password`
 echo "$NEXUS_DOCKER_PASSWORD" | docker login https://tdp-docker.dev.raftlabs.tech -u tdp-dev --password-stdin
 ```
 
diff --git a/product-updates/img/error-reports/error-report.png b/product-updates/img/error-reports/error-report.png
index bbb9594e9..79f96d81b 100644
Binary files a/product-updates/img/error-reports/error-report.png and b/product-updates/img/error-reports/error-report.png differ
diff --git a/product-updates/knowledge-center/index.html b/product-updates/knowledge-center/index.html
index 770593eb1..b0561bf2f 100644
--- a/product-updates/knowledge-center/index.html
+++ b/product-updates/knowledge-center/index.html
@@ -337,6 +337,99 @@ <h2 class="usa-card__heading" id="existing">Use an existing Login.gov Account</h
 
         <h2>What's new in TDP</h2>
 
+
+        <h3 style="margin-top: 32px;">October 10th 2024 (v 3.6.4)</h3>
+        <h4 style="margin-top: 16px; color: #595959;">Added:</h4>
+        <ul class="usa-icon-list">
+          <li class="usa-icon-list__item" style="margin-top: 16px;">
+            <div class="usa-icon-list__icon text-ink">
+              <img src="../img/added.svg" aria-hidden="true" alt="" style="background-color: #d3ffcc; border-radius: 5px; width: 18px; height: 18px; position: relative; top: 2px; margin-right: 5px; margin-left: 5px;"/>
+            </div>
+            <div class="usa-icon-list__content">
+              <h5 class="usa-icon-list__title">
+                New Error Type Column Added to Error Report
+              </h5>
+              <p>A new column called "Error Type" has been added to error reports. This feature will help users quickly locate high-priority errors affecting file submission.</p>
+              <p>The new column categorizes four types of errors by their priority and impact on file status. Detailed guidance to filter high-priority errors along with further information on this update is available in the <a href="viewing-error-reports.html#interpreting-error-types">Knowledge Center.</a></p>
+
+            </div>
+          </li>
+
+        </ul>   
+        <table class="usa-table">
+  <thead>
+    <tr>
+      <th scope="col">Error Type</th>
+      <th scope="col">Priority</th>
+      <th scope="col">Impact</th>
+      <th scope="col">File Status</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <th scope="row">File Pre-Check</th>
+      <td>
+        High
+      </td>
+      <td>
+        Some or all of the record(s) in the file are rejected
+      </td>
+      <td>
+        Rejected or Partially Accepted with errors
+      </td>
+
+    </tr>
+    <tr>
+      <th scope="row">Case Consistency</th>
+      <td>
+        High
+      </td>
+      <td>
+        Record(s) rejected
+      </td>
+      <td>
+        Partially accepted with errors
+      </td>
+    </tr>
+    <tr>
+      <th scope="row">Record Value Invalid</th>
+      <td>
+        Low
+      </td>
+      <td>
+        Record(s) accepted
+      </td>
+      <td>
+        Accepted with errors
+      </td>
+    </tr>
+
+    <tr>
+      <th scope="row">Record Value Inconsistency</th>
+      <td>
+        Low
+      </td>
+      <td>
+        Record(s) accepted
+      </td>
+      <td>
+        Accepted with errors
+      </td>
+
+    </tr>
+      
+  </tbody>
+</table>
+
+
+
+        <hr style="border: none;
+    height: 1px;
+    background: #000;
+    margin-top: 32px;
+    background: repeating-linear-gradient(90deg,#000,#000 6px,transparent 6px,transparent 12px);">
+
+
         <h3 style="margin-top: 32px;">September 10th 2024 (v 3.6.0)</h3>
         <h4 style="margin-top: 16px; color: #595959;">In Development:</h4>
         <ul class="usa-icon-list">
diff --git a/product-updates/knowledge-center/viewing-error-reports.html b/product-updates/knowledge-center/viewing-error-reports.html
index a72931f3b..4186f2034 100644
--- a/product-updates/knowledge-center/viewing-error-reports.html
+++ b/product-updates/knowledge-center/viewing-error-reports.html
@@ -346,8 +346,6 @@ <h2 id="download-and-view">Download and View Error Reports</h2>
 <h2 id="data-file-structure">Data File Structure</h2>
   <div style="min-height: 22px;"></div>
       
-
-
          <p>The files that you submit on a quarterly basis for your TANF or SSP program contain large amounts of data in a <a href="https://www.acf.hhs.gov/ofa/policy-guidance/final-tanf-ssp-moe-data-reporting-system-transmission-files-layouts-and-edits">specific format</a> that enables TDP to process and provide feedback when uploaded and submitted. Please note that these files are designed for system processing and, as such, are not as human-readable. For example, below is an illustration of a short TANF, Section 1 (Active Case Data) file:</p>
 
   <a href="../img/error-reports/flatfile.png" data-lity data-lity-desc="TANF data file opened in a text editor beginning with a Header record and ending with a Trailer record">
@@ -558,6 +556,7 @@ <h2 id="overview-of-the-error-report">Overview of the Error Report</h2>
   <li>Item Number and Item Name — which can be used alongside the error message to cross-reference with the coding instructions.</li>
   <li>Internal Variable Name — a value specific to TDP's database for use in support sessions with the TANF data team.</li>
   <li>Row Number — identifying which row of the submitted file contains the record associated with a given error.</li>
+  <li>Error Type — indicates the category of error and is related to both the cause of the error, and its effect on whether records are accepted into the database.</li>
 </ul>
 <div style="min-height: 22px;"></div> 
 
@@ -565,13 +564,101 @@ <h2 id="overview-of-the-error-report">Overview of the Error Report</h2>
   <a href="../img/error-reports/error-report.png" data-lity data-lity-desc="TDP error report containing some of the error examples below opened in Excel">
       <img src="../img/error-reports/error-report.png" alt="TDP error report containing some of the error examples below opened in Excel">
       </a>
+
+      <div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
+      <h3 id="interpreting-error-types">Interpreting Error Types</h3>
+      <div style="min-height: 22px;"></div>
+      <p>There are four error types, which affect data submission differently. Unlike low-priority errors, high-priority errors will prevent some or all records in the file from being accepted into the TDP system's database. You may filter the report to display high-priority errors first. Low-priority errors will not affect whether a record makes it into the database, but we encourage you to refer to the coding instructions to make corrections for the most accurate data. <a href="https://support.microsoft.com/en-us/office/filter-data-in-a-range-or-table-01832226-31b5-4568-8806-38c37dcc180e" target="_blank">Learn how to filter data here.</a></p>
+      <br>
+      <p>For users familiar with the legacy system, errors are equivalent to edits. File Pre-Check and Case Consistency errors are similar to fatal edits, and Record Value Invalid and Record Value Inconsistency errors are similar to warnings.</p>
+
+      <table class="usa-table">
+        <thead>
+          <tr>
+            <th scope="col">Error Type</th>
+            <th scope="col">Description</th>
+            <th scope="col">Priority</th>
+            <th scope="col">Impact</th>
+            <th scope="col">File Status</th>
+          </tr>
+        </thead>
+        <tbody>
+          <tr>
+            <th scope="row">File Pre-Check</th>
+            <td>
+              These errors relate to the format of the records within a file (e.g. incorrect headers, incorrect record length).
+            </td>
+            <td>
+            High
+            </td>
+            <td>
+              Some or all of the record(s) in the file are rejected
+            </td>
+            <td>
+              Rejected or Partially Accepted with errors
+
+            </td>
+      
+          </tr>
+          <tr>
+            <th scope="row">Case Consistency</th>
+            <td>
+              These errors relate to inconsistency across records (e.g. inconsistency across related person- and family-level records).
+              They are triggered by a series of checks that confirm that all the records for a family in a given month are related in a meaningful way.
+            </td>
+            <td>
+              High
+              </td>
+              <td>
+                Record(s) rejected              
+              </td>
+              <td>
+                Partially accepted with errors
+              </td>
+      
+          </tr>
+          <tr>
+            <th scope="row">Record Value Invalid</th>
+            <td>
+              These errors involve <i>unexpected</i> values in the record (e.g. missing values or out-of-range values).   
+            </td>
+            <td>
+              Low
+              </td>
+              <td>
+                Record(s) accepted             
+              </td>
+              <td>
+                Accepted with errors
+              </td>
+      
+          </tr>
+          <tr>
+            <th scope="row">Record Value Inconsistency</th>
+            <td>
+              These errors stem from inconsistencies in the record (e.g. conflicting data between items in the same record).              
+            </td>
+            <td>
+              Low
+              </td>
+              <td>
+                Record(s) accepted              
+              </td>
+              <td>
+                Accepted with errors
+              </td>
+      
+          </tr>
+            
+        </tbody>
+      </table>
+ 
 <div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
+
+
+
 <h3>Examples of Common Errors</h3>
 <div style="min-height: 22px;"></div>
 <p>Below are examples of error messages associated with common issues that may be listed in an error report.</p>
 
-
-
         <div class="usa-alert usa-alert--info usa-alert--no-icon">
   <div class="usa-alert__body">
     <p class="usa-alert__text">
@@ -581,11 +668,10 @@ <h3>Examples of Common Errors</h3>
 
 <div style="min-height: 22px;"></div>
 <div style="min-height: 22px;"></div>
-  <h4>Errors related to header or trailer records:</h4>
+  <h4>File Pre-Check: Errors related to header or trailer records:</h4>
   <div style="min-height: 22px;"></div>
   <p>Header-related errors are often the result of submitting files for a fiscal period that is not consistent with the time period in the header record (e.g. trying to submit 2022 data for a 2024 submission). Other header or trailer errors may be related to how the file was generated (e.g. the file produced is missing a header or trailer record). Some examples of how these types of error may appear in your error report are included below:</p>
 
-
      <div style="min-height: 22px;"></div>
 <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">Submitted reporting year: 2024, quarter: 1 doesn't match file reporting year 2022, quarter: 4. 
     </code>
@@ -593,29 +679,31 @@ <h4>Errors related to header or trailer records:</h4>
     <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">Your file does not begin with a HEADER record.
     </code>
 <div style="min-height: 22px;"></div>
-
-
             <p>Please refer to the <a href="https://www.acf.hhs.gov/sites/default/files/documents/ofa/transmission_file_header_trailer_record.pdf">Transmission File Header Record definitions</a> to compare your file's header or trailer to the expected layout.
 
 
   <div style="min-height: 22px;"></div><div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
-  <h4>Errors related to record length:</h4>
+  <h4>File Pre-Check: Errors related to record length:</h4>
   <div style="min-height: 22px;"></div>
   <p>Record length-related errors will be raised if the specified record is not aligned with the record layout requirements. For example, this kind of error may appear as follows in the report:</p>
    <div style="min-height: 22px;"></div>
-
-
-
-        
     <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">T6 record length is 409 characters but must be 379.
     </code>
+    <div style="min-height: 22px;"></div>
 
+    <p>Please refer to the <a href="https://www.acf.hhs.gov/ofa/policy-guidance/final-tanf-ssp-moe-data-reporting-system-transmission-files-layouts-and-edits">Transmission File Layout documents</a> to compare your records against their expected layouts.</p>
 
-           <div style="min-height: 22px;"></div>
-           <p>Please refer to the <a href="https://www.acf.hhs.gov/ofa/policy-guidance/final-tanf-ssp-moe-data-reporting-system-transmission-files-layouts-and-edits">Transmission File Layout documents</a> to compare your records against their expected layouts.</p>
+    <div style="min-height: 22px;"></div><div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
+  <h4 id="related-records">Case Consistency: Errors related to inconsistent values across related records:</h4>
+  <div style="min-height: 22px;"></div>
+  <p>Errors with inconsistent values across related records may require review of the coding instructions to determine the proper correction. In the example below, the error is communicating that a T1 (family) record was found in the file that did not have a corresponding T2 (adult) or T3 (child) record, which effectively means that person-level records associated with this family are missing from the file. </p>
+  <div style="min-height: 22px;"></div>
+  <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">Every T1 record should have at least one corresponding T2 or T3 record with the same Report Month & Year and Case Number.
+  </code>
 
   <div style="min-height: 22px;"></div><div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
-  <h4>Errors related to invalid values for a specific item/data element:</h4>
+  <h4>Record Value Invalid: Errors related to invalid values for a specific item/data element:</h4>
   <div style="min-height: 22px;"></div>
   <p>Invalid value errors can come up when a specific item/data element has an unexpected value (e.g. a letter or a symbol was reported for the zip code field, such as: "462$1"): </p>
 
@@ -630,32 +718,15 @@ <h4>Errors related to invalid values for a specific item/data element:</h4>
             <li><a href="https://www.acf.hhs.gov/ofa/policy-guidance/acf-ofa-pi-23-04">TANF (ACF-199) and SSP-MOE (ACF-209) Coding Instructions</a></li>
           </ul>
 
-
   <div style="min-height: 22px;"></div><div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
-  <h4 id="inconsistent-values">Errors related to inconsistent values for related items/data elements in the same record:</h4>
+  <h4 id="inconsistent-values">Record Value Inconsistency: Errors related to inconsistent values for related items/data elements in the same record:</h4>
    <div style="min-height: 22px;"></div>
   <p>Some errors may require review of the coding instructions for multiple items (and their respective values) to determine the proper correction. In the example below, the error is communicating that the value reported for Item 49 is in a conflict with the value for Item 30 in the same record. This message suggests a problem with either the value of Item 49 or the value of Item 30. Refer to the coding instructions and your own data to determine which value needs to be corrected.</p>
-
-
-
-             <div style="min-height: 22px;"></div>
-    <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">If Item 30 (Family Affiliation) is 1 then Item 49 (Work Participation Status) must be in set of values [01, 02, 05, 07, 09, 15, 17, 18, 19, 99].
-    </code>
 <div style="min-height: 22px;"></div>
-
-    <div style="min-height: 22px;"></div><div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
-  <h4 id="related-records">Errors related to inconsistent values across related records:</h4>
-  <div style="min-height: 22px;"></div>
-  <p>Errors with inconsistent values across related records may require review of the coding instructions to determine the proper correction. In the example below, the error is communicating that a T1 (family) record was found in the file that did not have a corresponding T2 (adult) or T3 (child) record, which effectively means that person-level records associated with this family are missing from the file. </p>
-
-
-
-                       <div style="min-height: 22px;"></div>
-    <code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">Every T1 record should have at least one corresponding T2 or T3 record with the same Report Month & Year and Case Number.
-    </code>
+<code style="border-left: solid 5px; border-color: #005ea2; padding: 8px; background-color: #f8f8f8; display: inline-block;">If Item 30 (Family Affiliation) is 1 then Item 49 (Work Participation Status) must be in set of values [01, 02, 05, 07, 09, 15, 17, 18, 19, 99].
+</code>
+<div style="min-height: 22px;"></div>
 <div style="min-height: 22px;"></div>
-
-<div style="min-height: 22px;"></div> <div style="min-height: 22px;"></div> 
 
                   <hr style="border: none;
     height: 1px;
diff --git a/tdrs-backend/db-upgrade/cloud-foundry-db-upgrade.md b/tdrs-backend/db-upgrade/cloud-foundry-db-upgrade.md
new file mode 100644
index 000000000..abb9caa30
--- /dev/null
+++ b/tdrs-backend/db-upgrade/cloud-foundry-db-upgrade.md
@@ -0,0 +1,204 @@
+# Cloud Foundry, Cloud.gov AWS RDS Database Upgrade
+The process below provides a guide to roll our backend applications over to a new RDS version and instance. The entire process can take several hours and involves downtime for the environment you are upgrading. Be sure to take those factors into account before commencing the process.
+
+## Process
+
+### 1. Open an SSH tunnel to the service
+To execute commands on the RDS instance we can open an SSH tunnel to the service and run all our commands from our local machine. Keep this tunnel open in a separate terminal window until this process is complete!
+
+```
+cf connect-to-service --no-client <APP_NAME_THAT_IS_BOUND_TO_RDS> <RDS_SERVICE_NAME>
+```
+
+You should see output similar to:
+
+```
+Finding the service instance details...
+Setting up SSH tunnel...
+SSH tunnel created.
+Skipping call to client CLI. Connection information:
+
+Host: localhost
+Port: 63634
+Username: <REDACTED>
+Password: <REDACTED>
+Name: <REDACTED>
+
+Leave this terminal open while you want to use the SSH tunnel. Press Control-C to stop.
+```
+
+### 2. Create a backup of the database(s) in the RDS instance
+In a separate terminal from your SSH tunnel terminal, generate the `pg_dump` files.
+Note: the HOST, PORT, DB_USER, and PASSWORD values come from the output of the SSH tunnel. The DB_NAME parameter is the name of the DB you want to export, e.g. `tdp_db_raft`. You will need to run this command once for each DB in the instance.
+
+```
+pg_dump -h <HOST> -p <PORT> -d <DB_NAME> -U <DB_USER> -F c --no-acl --no-owner -f <FILE_NAME>.pg
+```
+
+After the command finishes, you should see `<FILE_NAME>.pg` in your current working directory.
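+
+If the instance contains several databases, a minimal loop sketch (the DB names below are hypothetical placeholders, not actual environment values):
+
+```
+# Hypothetical DB names -- substitute the actual databases in your RDS instance.
+for DB_NAME in tdp_db_raft tdp_db_other; do
+  pg_dump -h <HOST> -p <PORT> -d "$DB_NAME" -U <DB_USER> -F c --no-acl --no-owner -f "$DB_NAME".pg
+done
+```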
+
+### 3. Update Terraform to create a new RDS instance
+Follow the instructions in the `terraform/README.md` to get Terraform configured. Modify the `main.tf` file in the `terraform/<ENV>` directory to include a new RDS instance. E.g. if you were updating `prod` to version 15.x, you would add the following code to the `main.tf` file. We are **NOT** removing the existing `resource "cloudfoundry_service_instance" "database"` from the `main.tf` file. Note that the resource name (i.e. `new-database`) and the `name` of the new RDS instance are not the same as the original resource name and RDS name. This is intentional, and we will remedy it in later steps.
+
+```hcl
+resource "cloudfoundry_service_instance" "new-database" {
+  name             = "tdp-db-prod-new"
+  space            = data.cloudfoundry_space.space.id
+  service_plan     = data.cloudfoundry_service.rds.service_plans["medium-gp-psql"]
+  json_params      = "{\"version\": \"15\", \"storage_type\": \"gp3\", \"storage\": 500}"
+  recursive_delete = true
+  timeouts {
+    create = "60m"
+    update = "60m"
+    delete = "2h"
+  }
+}
+```
+
+After adding the new RDS resource to `main.tf`, you can follow the rest of the instructions in the `terraform/README.md` to plan and then apply this change with Terraform.
+
+### 4. Bind an app to the new RDS instance
+In the `tdrs-backend/db-upgrade` directory, open the `manifest.yml` file and update the `services` block to reference the new RDS service you just created (in the example this would be `- tdp-db-prod-new`), as sketched below. Then deploy this manifest: `cf push --no-route -f manifest.yml -t 180`, and wait for the connector app to deploy. We deploy a temporary app to avoid extended downtime for the backend app(s), to prevent erroneous transactions on the new RDS instance, and to let us open a new SSH tunnel to the new RDS instance. If you haven't already, you should now close the original SSH tunnel you opened in step 1.
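+
+A minimal sketch of the updated `services` block, assuming the example service name from step 3:
+
+```yaml
+  services:
+    - tdp-db-prod-new
+```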
+
+### 5. Open an SSH tunnel to the new RDS instance
+Again, in a separate terminal execute the following command and leave that terminal/connection alive until further notice.
+
+```
+cf connect-to-service --no-client db-connector <NEW_RDS_SERVICE_NAME>
+```
+
+### 6. Create the appropriate database(s) in the new RDS server
+Using the credentials from the new SSH tunnel, create in the new RDS instance the same DB(s) you dumped earlier.
+
+```
+createdb -U <DB_USER> -h <HOST> -p <PORT> <DB_NAME>
+```
+
+### 7. Restore the backup(s) to the appropriate database(s)
+Using the credentials from the new SSH tunnel, restore the backups to the appropriate DBs.
+
+```
+pg_restore -p <PORT> -h <HOST> -U <DB_USER> -d <DB_NAME> <FILE_NAME>.pg
+```
+
+During this step, you may see errors similar to the message below. Note: `<DB_USER>` is substituted into the message to avoid leaking environment-specific usernames/roles.
+
+```
+pg_restore: from TOC entry 215; 1259 17313 SEQUENCE users_user_user_permissions_id_seq <DB_USER>
+pg_restore: error: could not execute query: ERROR:  role "<DB_USER>" does not exist
+Command was: ALTER TABLE public.users_user_user_permissions_id_seq OWNER TO <DB_USER>;
+```
+
+The final tally of these errors should look something like:
+
+```
+pg_restore: warning: errors ignored on restore: 68
+```
+
+If this is what you see, everything is OK. This happens because `pg_dump` does not remove all owner associations on DB objects. But as the blocks above show, `pg_restore` correctly alters the object owner to the new database user.
+
+### 8. Use `psql` to get into the database(s) to check state
+Using the credentials from the new SSH tunnel, use the `psql` CLI to inspect the restored DBs. Consider counting the number of tables in the new and old DBs, spot-checking record counts across different tables, etc.
+
+```
+psql -p <PORT> -h <HOST> -U <DB_USER> -d <DB_NAME>
+```
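+
+For example, a couple of sanity checks against the standard Postgres catalogs (run these in both the old and new DBs and compare):
+
+```
+-- count the tables in the public schema
+SELECT count(*) FROM pg_tables WHERE schemaname = 'public';
+-- spot-check table ownership after the restore
+SELECT tablename, tableowner FROM pg_tables WHERE schemaname = 'public' LIMIT 10;
+```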
+
+### 9. Rename and Move RDS instances
+Now that we have verified the data in our new RDS instance looks good, we need to lift and shift the backend app(s) to point to our new RDS instance as if it is the existing (now old) RDS instance.
+
+First we need to unbind the existing RDS instance from the backend app(s) it is bound to.
+
+```
+cf unbind-service <BACKEND_APP_NAME> <OLD_RDS_SERVICE_NAME>
+```
+
+After unbinding the service we want to update the "old RDS" service `name` to something different, plan, and then apply those changes with Terraform.
+
+```hcl
+resource "cloudfoundry_service_instance" "database" {
+  name             = "something-that-isnt-tdp-db-prod"
+  space            = data.cloudfoundry_space.space.id
+  service_plan     = data.cloudfoundry_service.rds.service_plans["medium-gp-psql"]
+  json_params      = "{\"version\": \"15\", \"storage_type\": \"gp3\", \"storage\": 500}"
+  recursive_delete = true
+  timeouts {
+    create = "60m"
+    update = "60m"
+    delete = "2h"
+  }
+}
+```
+
+Now we can rename our "new RDS" service to the expected `name` (i.e. the original `name` field from our old RDS instance). Then we plan and apply those changes with Terraform.
+
+```hcl
+resource "cloudfoundry_service_instance" "new-database" {
+  name             = "tdp-db-prod"
+  space            = data.cloudfoundry_space.space.id
+  service_plan     = data.cloudfoundry_service.rds.service_plans["medium-gp-psql"]
+  json_params      = "{\"version\": \"15\", \"storage_type\": \"gp3\", \"storage\": 500}"
+  recursive_delete = true
+  timeouts {
+    create = "60m"
+    update = "60m"
+    delete = "2h"
+  }
+}
+```
+
+Next we will bind the new RDS service back to the backend app(s) we unbound the old instance from and restage them. Be sure to monitor the backend app's logs to ensure it connects to the instance and starts as expected.
+
+```
+cf bind-service <BACKEND_APP_NAME> <RDS_SERVICE_NAME>
+```
+
+Then
+
+```
+cf restage <BACKEND_APP_NAME>
+```
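+
+For example, to check the recent logs:
+
+```
+cf logs <BACKEND_APP_NAME> --recent
+```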
+
+If the backend app(s) are running with no issues, we can now safely remove the "old RDS" service from Terraform. Remove the entire resource block named `database` from `main.tf`, plan and then apply the changes to remove that instance with Terraform.
+
+Finally, to get our Terraform state looking like it originally did, we rename our `new-database` resource back to `database` so the configuration stays consistent. To prevent Terraform from deleting the resource (since `database` won't exist in the state), we inform Terraform that we have "moved" the resource by adding the following code to `main.tf`. Note: when running `terraform plan ...` it will not show any infrastructure changes, only a name change. Ensure you still apply even if it looks like there are no changes!
+
+```hcl
+moved {
+  from = cloudfoundry_service_instance.new-database
+  to   = cloudfoundry_service_instance.database
+}
+```
+
+After adding the above code, plan and apply the changes with Terraform. Once Terraform has successfully applied the change, remove the `moved` block from `main.tf`. Run `terraform plan ...` again and assert it agrees that there are no changes to be made. If Terraform reports changes, you have made a mistake somewhere and will need to track it down before proceeding.
+
+### 10. Access the re-staged app(s) and run a smoke test
+- Log in
+- Submit a few datafiles
+- Make sure new and existing submission histories populate correctly
+- Check out the DACs data
+
+If everything looks good, there is nothing more to do. If the apps aren't working or connecting to the new RDS instance, you will need to debug manually and determine if/where you made a mistake.
+
+### 11. Update the `postgresql-client` version to the new version in `tdrs-backend/apt.yml`
+
+```yaml
+- postgresql-client-<NEW VERSION>
+```
+
+Note: if the underlying OS for Cloud Foundry is no longer `cflinuxfs4` (code name `jammy`), you may also need to update the repo we point to for the postgres client binaries.
+
+### 12. Update the postgres container version in `tdrs-backend/docker-compose.yml`
+```yaml
+postgres:
+  image: postgres:<NEW VERSION>
+```
+
+### 13. Commit and push the correct changes, revert unnecessary changes
+Commit and push the changes for:
+- `main.tf`
+- `tdrs-backend/apt.yml`
+- `tdrs-backend/docker-compose.yml`
+
+Revert the changes for:
+- `manifest.yml`
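+
+A minimal sketch of this cleanup, assuming you are at the repo root and upgraded the `<ENV>` environment:
+
+```
+git add terraform/<ENV>/main.tf tdrs-backend/apt.yml tdrs-backend/docker-compose.yml
+git commit -m "Upgrade RDS instance to Postgres <NEW VERSION>"
+git push
+# discard the temporary connector manifest changes
+git checkout -- tdrs-backend/db-upgrade/manifest.yml
+```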
diff --git a/tdrs-backend/db-upgrade/manifest.yml b/tdrs-backend/db-upgrade/manifest.yml
new file mode 100644
index 000000000..33f655e96
--- /dev/null
+++ b/tdrs-backend/db-upgrade/manifest.yml
@@ -0,0 +1,12 @@
+version: 1
+applications:
+- name: db-connector
+  instances: 1
+  memory: 512M
+  disk_quota: 2G
+  env:
+    POSTGRES_PASSWORD: password
+  docker:
+    image: postgres:15.7-alpine3.20
+  services:
+    - <DB_SERVICE_TO_CONNECT_TO>
diff --git a/tdrs-backend/docker-compose.yml b/tdrs-backend/docker-compose.yml
index 3330ae493..7ab823d3e 100644
--- a/tdrs-backend/docker-compose.yml
+++ b/tdrs-backend/docker-compose.yml
@@ -178,7 +178,7 @@ services:
       - ELASTICSEARCH_LOG_INDEX_SLOW_LEVEL
     volumes:
       - .:/tdpapp
-      - logs:/tdpapp
+      - logs:/tmp
     image: tdp-backend
     build: .
     command: >
diff --git a/tdrs-backend/docs/session-management.md b/tdrs-backend/docs/session-management.md
index e4f0c1831..6c079efe8 100644
--- a/tdrs-backend/docs/session-management.md
+++ b/tdrs-backend/docs/session-management.md
@@ -11,7 +11,7 @@ When the user logs in, they will receive an HttpOnly cookie with no `Expires=` s
 SESSION_EXPIRE_AT_BROWSER_CLOSE=True
 ```
 
-The cookie itself contains a `sessionid` reference to a Django-managed session. The session expiration is set to the same expiration of the login.gov-provided jwt, **15 minutes**.
+The cookie itself contains a `sessionid` reference to a Django-managed session. By default, the session expiration is set to the same expiration as the login.gov-provided JWT, **15 minutes**. Because the `sessionid` is signed when created, it is not possible to update its expiry without decoding and recreating the signature, so the session expires even though the cookie is extended with every request. To overcome this shortcoming, a longer expiry is assigned to the `sessionid` using a new variable, `SIGNED_COOKIE_EXPIRES`, in the common settings.
 
 This is managed in `tdrs-backend/tdpservice/settings/common.py` with the following setting:
 ```python
diff --git a/tdrs-backend/setup.cfg b/tdrs-backend/setup.cfg
index ab064a186..9ce169907 100644
--- a/tdrs-backend/setup.cfg
+++ b/tdrs-backend/setup.cfg
@@ -6,6 +6,7 @@ omit =
     tdpservice/settings/production.py
     tdpservice/settings/staging.py
     tdpservice/wsgi.py
+    tdpservice/scheduling/*
     *test*
     *migrations*
 
@@ -18,6 +19,7 @@ omit =
     tdpservice/settings/production.py
     tdpservice/settings/staging.py
     tdpservice/wsgi.py
+    tdpservice/scheduling/*
     *test*
     *migrations*
 
diff --git a/tdrs-backend/tdpservice/core/custom_session_engine.py b/tdrs-backend/tdpservice/core/custom_session_engine.py
new file mode 100644
index 000000000..70f397a52
--- /dev/null
+++ b/tdrs-backend/tdpservice/core/custom_session_engine.py
@@ -0,0 +1,34 @@
+"""Custom session engine for TDP."""
+
+from django.contrib.sessions.backends import signed_cookies
+from django.core import signing
+import datetime
+from django.conf import settings
+
+class SessionStore(signed_cookies.SessionStore):
+    """Custom session engine for TDP."""
+
+    def __init__(self, session_key=None):
+        """Initialize the custom session engine."""
+        super().__init__(session_key)
+
+    def load(self):
+        """Load the session data from the database."""
+        """
+        Load the data from the key itself instead of fetching from some
+        external data store. Opposite of _get_session_key(), raise BadSignature
+        if signature fails.
+        """
+
+        try:
+            return signing.loads(
+                self.session_key,
+                serializer=self.serializer,
+                # This doesn't handle non-default expiry dates, see #19201
+                max_age=datetime.timedelta(seconds=settings.SIGNED_COOKIE_EXPIRES),
+                salt="django.contrib.sessions.backends.signed_cookies",
+            )
+        except Exception:
+            # BadSignature, ValueError, or unpickling exceptions. If any of
+            # these happen, reset the session.
+            return {}
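+
+# A sketch of how this engine is wired up in settings (the SESSION_ENGINE path
+# below assumes this module's location; SIGNED_COOKIE_EXPIRES is defined in
+# tdrs-backend/tdpservice/settings/common.py, and the value shown here is only
+# an illustrative assumption):
+#
+#   SESSION_ENGINE = "tdpservice.core.custom_session_engine"
+#   SIGNED_COOKIE_EXPIRES = 60 * 60 * 12  # seconds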
diff --git a/tdrs-backend/tdpservice/data_files/admin/admin.py b/tdrs-backend/tdpservice/data_files/admin/admin.py
index 27c9b8868..eae060c36 100644
--- a/tdrs-backend/tdpservice/data_files/admin/admin.py
+++ b/tdrs-backend/tdpservice/data_files/admin/admin.py
@@ -1,10 +1,9 @@
 """Admin class for DataFile objects."""
 from django.contrib import admin
 from tdpservice.core.utils import ReadOnlyAdminMixin
-# from tdpservice.core.filters import custom_filter_title
 from tdpservice.data_files.models import DataFile, LegacyFileTransfer
 from tdpservice.parsers.models import DataFileSummary, ParserError
-from tdpservice.data_files.admin.filters import DataFileSummaryPrgTypeFilter, LatestReparseEvent, VersionFilter
+from tdpservice.data_files.admin.filters import LatestReparseEvent, VersionFilter
 from django.conf import settings
 from django.utils.html import format_html
 from datetime import datetime, timedelta, timezone
@@ -15,7 +14,7 @@
 class DataFileInline(admin.TabularInline):
     """Inline model for many to many relationship."""
 
-    model = DataFile.reparse_meta_models.through
+    model = DataFile.reparses.through
     can_delete = False
     ordering = ["-pk"]
 
@@ -113,14 +112,15 @@ def queryset(self, request, queryset):
     ]
 
     list_filter = [
+        'stt',
+        'year',
         'quarter',
         'section',
-        'stt',
+        'summary__status',
+        'stt__type',
+        'stt__region',
         'user',
-        'year',
         SubmissionDateFilter,
-        'summary__status',
-        DataFileSummaryPrgTypeFilter,
         LatestReparseEvent,
         VersionFilter,
     ]
diff --git a/tdrs-backend/tdpservice/data_files/admin/filters.py b/tdrs-backend/tdpservice/data_files/admin/filters.py
index 0f991d882..9fbb35530 100644
--- a/tdrs-backend/tdpservice/data_files/admin/filters.py
+++ b/tdrs-backend/tdpservice/data_files/admin/filters.py
@@ -4,27 +4,6 @@
 from tdpservice.search_indexes.models.reparse_meta import ReparseMeta
 from tdpservice.core.filters import MostRecentVersionFilter
 
-class DataFileSummaryPrgTypeFilter(admin.SimpleListFilter):
-    """Admin class filter for Program Type on datafile model."""
-
-    title = 'Program Type'
-    parameter_name = 'program_type'
-
-    def lookups(self, request, model_admin):
-        """Return a list of tuples."""
-        return [
-            ('TAN', 'TAN'),
-            ('SSP', 'SSP'),
-        ]
-
-    def queryset(self, request, queryset):
-        """Return a queryset."""
-        if self.value():
-            query_set_ids = [df.id for df in queryset if df.prog_type == self.value()]
-            return queryset.filter(id__in=query_set_ids)
-        else:
-            return queryset
-
 
 class LatestReparseEvent(admin.SimpleListFilter):
     """Filter class to filter files based on the latest reparse event."""
@@ -56,7 +35,7 @@ def queryset(self, request, queryset):
         if self.value() is not None and queryset.exists():
             latest_meta = ReparseMeta.get_latest()
             if latest_meta is not None:
-                queryset = queryset.filter(reparse_meta_models=latest_meta)
+                queryset = queryset.filter(reparses=latest_meta)
         return queryset
 
 
diff --git a/tdrs-backend/tdpservice/data_files/migrations/0014_reparsefilemeta.py b/tdrs-backend/tdpservice/data_files/migrations/0014_reparsefilemeta.py
new file mode 100644
index 000000000..d9cb6ee8b
--- /dev/null
+++ b/tdrs-backend/tdpservice/data_files/migrations/0014_reparsefilemeta.py
@@ -0,0 +1,29 @@
+# Generated by Django 3.2.15 on 2024-10-08 12:18
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('search_indexes', '0031_alter_tribal_tanf_t4_closure_reason'),
+        ('data_files', '0013_datafile_reparse_meta'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ReparseFileMeta',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('finished', models.BooleanField(default=False)),
+                ('success', models.BooleanField(default=False)),
+                ('started_at', models.DateTimeField(null=True)),
+                ('finished_at', models.DateTimeField(null=True)),
+                ('num_records_created', models.PositiveIntegerField(default=0)),
+                ('cat_4_errors_generated', models.PositiveIntegerField(default=0)),
+                ('data_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reparse_file_metas', to='data_files.datafile')),
+                ('reparse_meta', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reparse_file_metas', to='search_indexes.reparsemeta')),
+            ],
+        ),
+    ]
diff --git a/tdrs-backend/tdpservice/data_files/migrations/0015_datafile_reparses.py b/tdrs-backend/tdpservice/data_files/migrations/0015_datafile_reparses.py
new file mode 100644
index 000000000..c4cdea583
--- /dev/null
+++ b/tdrs-backend/tdpservice/data_files/migrations/0015_datafile_reparses.py
@@ -0,0 +1,20 @@
+# Generated by Django 3.2.15 on 2024-10-04 12:17
+
+from django.db import migrations, models
+from tdpservice.data_files.models import ReparseFileMeta
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('data_files', '0014_reparsefilemeta'),
+        ('search_indexes', '0031_alter_tribal_tanf_t4_closure_reason'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='datafile',
+            name='reparses',
+            field=models.ManyToManyField(help_text='Reparse events this file has been associated with.', related_name='files', through="data_files.ReparseFileMeta", to='search_indexes.ReparseMeta'),
+        ),
+    ]
diff --git a/tdrs-backend/tdpservice/data_files/migrations/0016_remove_datafile_reparse_meta_models.py b/tdrs-backend/tdpservice/data_files/migrations/0016_remove_datafile_reparse_meta_models.py
new file mode 100644
index 000000000..8a8796e92
--- /dev/null
+++ b/tdrs-backend/tdpservice/data_files/migrations/0016_remove_datafile_reparse_meta_models.py
@@ -0,0 +1,38 @@
+# Generated by Django 3.2.15 on 2024-10-04 12:17
+
+from django.db import migrations
+
+
+def switch_reparse_meta_through_model(apps, schema_editor):
+    DataFile = apps.get_model("data_files", "DataFile")
+    ReparseMeta = apps.get_model("search_indexes", "ReparseMeta")
+    OldThru = DataFile.reparse_meta_models.through
+    ReparseFileMeta = apps.get_model("data_files", "ReparseFileMeta")
+
+    q = OldThru.objects.all()
+
+    print(f'switching {q.count()} through models')
+
+    for m in q:
+        ReparseFileMeta.objects.create(
+            data_file_id=m.datafile.pk,
+            reparse_meta_id=m.reparsemeta.pk
+        )
+        m.delete()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('data_files', '0015_datafile_reparses'),
+    ]
+
+    operations = [
+        migrations.RunPython(
+            switch_reparse_meta_through_model,
+        ),
+        migrations.RemoveField(
+            model_name='datafile',
+            name='reparse_meta_models',
+        ),
+    ]
diff --git a/tdrs-backend/tdpservice/data_files/models.py b/tdrs-backend/tdpservice/data_files/models.py
index 6fe5355e0..edf93e75e 100644
--- a/tdrs-backend/tdpservice/data_files/models.py
+++ b/tdrs-backend/tdpservice/data_files/models.py
@@ -79,6 +79,24 @@ class Meta:
     # separately
     extension = models.CharField(max_length=8, default="txt")
 
+class ReparseFileMeta(models.Model):
+    """Meta data model representing a single file parse within a reparse execution."""
+
+    data_file = models.ForeignKey('data_files.DataFile', on_delete=models.CASCADE, related_name='reparse_file_metas')
+    reparse_meta = models.ForeignKey(
+        'search_indexes.ReparseMeta',
+        on_delete=models.CASCADE,
+        related_name='reparse_file_metas'
+    )
+
+    finished = models.BooleanField(default=False)
+    success = models.BooleanField(default=False)
+    started_at = models.DateTimeField(auto_now_add=False, null=True)
+    finished_at = models.DateTimeField(auto_now_add=False, null=True)
+
+    num_records_created = models.PositiveIntegerField(default=0)
+    cat_4_errors_generated = models.PositiveIntegerField(default=0)
+
 
 class DataFile(FileRecord):
     """Represents a version of a data file."""
@@ -153,10 +171,12 @@ class Meta:
                                         null=True
                                         )
 
-    reparse_meta_models = models.ManyToManyField("search_indexes.ReparseMeta",
-                                                 help_text="Reparse events this file has been associated with.",
-                                                 related_name="datafiles"
-                                                 )
+    reparses = models.ManyToManyField(
+        "search_indexes.ReparseMeta",
+        through="data_files.ReparseFileMeta",
+        help_text="Reparse events this file has been associated with.",
+        related_name="files"
+    )
 
     @property
     def prog_type(self):
@@ -164,8 +184,6 @@ def prog_type(self):
         # e.g., 'SSP Closed Case Data'
         if self.section.startswith('SSP'):
             return 'SSP'
-        elif self.section.startswith('Tribal'):
-            return 'TAN'  # problematic, do we need to infer tribal entirely from tribe/fips code?
         else:
             return 'TAN'
 
diff --git a/tdrs-backend/tdpservice/data_files/tasks.py b/tdrs-backend/tdpservice/data_files/tasks.py
index 16e35de79..0ea5446af 100644
--- a/tdrs-backend/tdpservice/data_files/tasks.py
+++ b/tdrs-backend/tdpservice/data_files/tasks.py
@@ -13,7 +13,7 @@
 
 def get_stuck_files():
     """Return a queryset containing files in a 'stuck' state."""
-    stuck_files = DataFile.objects.annotate(reparse_count=Count('reparse_meta_models')).filter(
+    stuck_files = DataFile.objects.annotate(reparse_count=Count('reparses')).filter(
         # non-reparse submissions over an hour old
         Q(
             reparse_count=0,
@@ -22,9 +22,9 @@ def get_stuck_files():
         # reparse submissions past the timeout, where the reparse did not complete
         Q(
             reparse_count__gt=0,
-            reparse_meta_models__timeout_at__lte=timezone.now(),
-            reparse_meta_models__finished=False,
-            reparse_meta_models__success=False
+            reparses__timeout_at__lte=timezone.now(),
+            reparse_file_metas__finished=False,
+            reparse_file_metas__success=False
         )
     ).filter(
         # where there is NO summary or the summary is in PENDING status
diff --git a/tdrs-backend/tdpservice/data_files/test/test_stuck_files.py b/tdrs-backend/tdpservice/data_files/test/test_stuck_files.py
index 95f4f8f3a..10a480ec4 100644
--- a/tdrs-backend/tdpservice/data_files/test/test_stuck_files.py
+++ b/tdrs-backend/tdpservice/data_files/test/test_stuck_files.py
@@ -31,12 +31,10 @@ def make_summary(datafile, status):
     )
 
 
-def make_reparse_meta(finished, success):
+def make_reparse_meta():
     """Create a test reparse meta model."""
     return ReparseMetaFactory.create(
-        timeout_at=_time_ago(hours=1),
-        finished=finished,
-        success=success
+        timeout_at=_time_ago(hours=1)
     )
 
 
@@ -54,8 +52,8 @@ def test_find_pending_submissions__none_stuck(stt_user, stt):
     df2.created_at = _time_ago(hours=1)
     df2.save()
     make_summary(df2, DataFileSummary.Status.ACCEPTED)
-    rpm = make_reparse_meta(True, True)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': True, 'success': True})
 
     # a pending standard submission, less than an hour old
     df3 = make_datafile(stt_user, stt, 3)
@@ -81,8 +79,8 @@ def test_find_pending_submissions__non_reparse_stuck(stt_user, stt):
     df2.created_at = _time_ago(hours=1)
     df2.save()
     make_summary(df2, DataFileSummary.Status.ACCEPTED)
-    rpm = make_reparse_meta(True, True)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': True, 'success': True})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
@@ -102,8 +100,8 @@ def test_find_pending_submissions__non_reparse_stuck__no_dfs(stt_user, stt):
     df2.created_at = _time_ago(hours=1)
     df2.save()
     make_summary(df2, DataFileSummary.Status.ACCEPTED)
-    rpm = make_reparse_meta(True, True)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': True, 'success': True})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
@@ -124,8 +122,8 @@ def test_find_pending_submissions__reparse_stuck(stt_user, stt):
     df2.created_at = _time_ago(hours=1)
     df2.save()
     make_summary(df2, DataFileSummary.Status.PENDING)
-    rpm = make_reparse_meta(False, False)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
@@ -145,8 +143,8 @@ def test_find_pending_submissions__reparse_stuck__no_dfs(stt_user, stt):
     df2 = make_datafile(stt_user, stt, 2)
     df2.created_at = _time_ago(hours=1)
     df2.save()
-    rpm = make_reparse_meta(False, False)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
@@ -167,8 +165,8 @@ def test_find_pending_submissions__reparse_and_non_reparse_stuck(stt_user, stt):
     df2.created_at = _time_ago(hours=1)
     df2.save()
     make_summary(df2, DataFileSummary.Status.PENDING)
-    rpm = make_reparse_meta(False, False)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 2
@@ -188,8 +186,8 @@ def test_find_pending_submissions__reparse_and_non_reparse_stuck_no_dfs(stt_user
     df2 = make_datafile(stt_user, stt, 2)
     df2.created_at = _time_ago(hours=1)
     df2.save()
-    rpm = make_reparse_meta(False, False)
-    df2.reparse_meta_models.add(rpm)
+    rpm = make_reparse_meta()
+    df2.reparses.add(rpm, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 2
@@ -207,8 +205,8 @@ def test_find_pending_submissions__old_reparse_stuck__new_not_stuck(stt_user, st
     dfs1 = make_summary(df1, DataFileSummary.Status.PENDING)
 
     # reparse fails the first time
-    rpm1 = make_reparse_meta(False, False)
-    df1.reparse_meta_models.add(rpm1)
+    rpm1 = make_reparse_meta()
+    df1.reparses.add(rpm1, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
@@ -217,8 +215,8 @@ def test_find_pending_submissions__old_reparse_stuck__new_not_stuck(stt_user, st
     dfs1.delete()  # reparse deletes the original dfs and creates the new one
     make_summary(df1, DataFileSummary.Status.ACCEPTED)
 
-    rpm2 = make_reparse_meta(True, True)
-    df1.reparse_meta_models.add(rpm2)
+    rpm2 = make_reparse_meta()
+    df1.reparses.add(rpm2, through_defaults={'finished': True, 'success': True})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 0
@@ -234,8 +232,8 @@ def test_find_pending_submissions__new_reparse_stuck__old_not_stuck(stt_user, st
     dfs1 = make_summary(df1, DataFileSummary.Status.REJECTED)
 
     # reparse succeeds
-    rpm1 = make_reparse_meta(True, True)
-    df1.reparse_meta_models.add(rpm1)
+    rpm1 = make_reparse_meta()
+    df1.reparses.add(rpm1, through_defaults={'finished': True, 'success': True})
 
     # reparse again, fails this time
     dfs1.delete()  # reparse deletes the original dfs and creates the new one
@@ -244,8 +242,8 @@ def test_find_pending_submissions__new_reparse_stuck__old_not_stuck(stt_user, st
         status=DataFileSummary.Status.PENDING,
     )
 
-    rpm2 = make_reparse_meta(False, False)
-    df1.reparse_meta_models.add(rpm2)
+    rpm2 = make_reparse_meta()
+    df1.reparses.add(rpm2, through_defaults={'finished': False, 'success': False})
 
     stuck_files = get_stuck_files()
     assert stuck_files.count() == 1
diff --git a/tdrs-backend/tdpservice/parsers/parse.py b/tdrs-backend/tdpservice/parsers/parse.py
index b2b9f0445..187787745 100644
--- a/tdrs-backend/tdpservice/parsers/parse.py
+++ b/tdrs-backend/tdpservice/parsers/parse.py
@@ -13,7 +13,6 @@
 from tdpservice.parsers.schema_defs.utils import get_section_reference, get_program_model
 from tdpservice.parsers.case_consistency_validator import CaseConsistencyValidator
 from tdpservice.parsers.util import log_parser_exception
-from tdpservice.search_indexes.models.reparse_meta import ReparseMeta
 
 logger = logging.getLogger(__name__)
 
@@ -34,7 +33,6 @@ def parse_datafile(datafile, dfs):
         logger.info(f"Preparser Error: {len(header_errors)} header errors encountered.")
         errors['header'] = header_errors
         bulk_create_errors({1: header_errors}, 1, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
     elif header_is_valid and len(header_errors) > 0:
         logger.info(f"Preparser Warning: {len(header_errors)} header warnings encountered.")
@@ -75,7 +73,6 @@ def parse_datafile(datafile, dfs):
                     f"({header['program_type']}) and FIPS Code ({field_values['state_fips']}).",)
         errors['header'] = [tribe_error]
         bulk_create_errors({1: [tribe_error]}, 1, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
 
     # Ensure file section matches upload section
@@ -90,7 +87,6 @@ def parse_datafile(datafile, dfs):
         errors['document'] = [section_error]
         unsaved_parser_errors = {1: [section_error]}
         bulk_create_errors(unsaved_parser_errors, 1, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
 
     rpt_month_year_is_valid, rpt_month_year_error = category1.validate_header_rpt_month_year(
@@ -103,7 +99,6 @@ def parse_datafile(datafile, dfs):
         errors['document'] = [rpt_month_year_error]
         unsaved_parser_errors = {1: [rpt_month_year_error]}
         bulk_create_errors(unsaved_parser_errors, 1, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
 
     line_errors = parse_datafile_lines(datafile, dfs, program_type, section, is_encrypted, case_consistency_validator)
@@ -112,11 +107,6 @@ def parse_datafile(datafile, dfs):
 
     return errors
 
-def update_meta_model(datafile, dfs):
-    """Update appropriate meta models."""
-    ReparseMeta.increment_records_created(datafile.reparse_meta_models, dfs.total_number_of_records_created)
-    ReparseMeta.increment_files_completed(datafile.reparse_meta_models)
-
 def bulk_create_records(unsaved_records, line_number, header_count, datafile, dfs, flush=False):
     """Bulk create passed in records."""
     batch_size = settings.BULK_CREATE_BATCH_SIZE
@@ -385,7 +375,6 @@ def parse_datafile_lines(datafile, dfs, program_type, section, is_encrypted, cas
             rollback_records(unsaved_records.get_bulk_create_struct(), datafile)
             rollback_parser_errors(datafile)
             bulk_create_errors(preparse_error, num_errors, flush=True)
-            update_meta_model(datafile, dfs)
             return errors
 
         if prev_sum != header_count + trailer_count:
@@ -448,7 +437,6 @@ def parse_datafile_lines(datafile, dfs, program_type, section, is_encrypted, cas
         rollback_parser_errors(datafile)
         preparse_error = {line_number: [err_obj]}
         bulk_create_errors(preparse_error, num_errors, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
 
     should_remove = validate_case_consistency(case_consistency_validator)
@@ -469,7 +457,6 @@ def parse_datafile_lines(datafile, dfs, program_type, section, is_encrypted, cas
         logger.error(f"Not all parsed records created for file: {datafile.id}!")
         rollback_records(unsaved_records.get_bulk_create_struct(), datafile)
         bulk_create_errors(unsaved_parser_errors, num_errors, flush=True)
-        update_meta_model(datafile, dfs)
         return errors
 
     # Add any generated cat4 errors to our error data structure & clear our caches errors list
@@ -486,8 +473,6 @@ def parse_datafile_lines(datafile, dfs, program_type, section, is_encrypted, cas
                  f"validated {case_consistency_validator.total_cases_validated} of them.")
     dfs.save()
 
-    update_meta_model(datafile, dfs)
-
     return errors
 
 
diff --git a/tdrs-backend/tdpservice/parsers/test/factories.py b/tdrs-backend/tdpservice/parsers/test/factories.py
index c0f50e85b..5b952d02d 100644
--- a/tdrs-backend/tdpservice/parsers/test/factories.py
+++ b/tdrs-backend/tdpservice/parsers/test/factories.py
@@ -17,11 +17,6 @@ class Meta:
         model = "search_indexes.ReparseMeta"
 
     timeout_at = timezone.now()
-    finished = False
-    success = False
-    num_files_to_reparse = 1
-    files_completed = 1
-    files_failed = 0
 
 
 class ParsingFileFactory(factory.django.DjangoModelFactory):
diff --git a/tdrs-backend/tdpservice/scheduling/parser_task.py b/tdrs-backend/tdpservice/scheduling/parser_task.py
index 2b1fb3d51..06d0f7b21 100644
--- a/tdrs-backend/tdpservice/scheduling/parser_task.py
+++ b/tdrs-backend/tdpservice/scheduling/parser_task.py
@@ -2,12 +2,13 @@
 from __future__ import absolute_import
 from celery import shared_task
 import logging
+from django.utils import timezone
 from django.contrib.auth.models import Group
 from django.db.utils import DatabaseError
 from tdpservice.users.models import AccountApprovalStatusChoices, User
-from tdpservice.data_files.models import DataFile
+from tdpservice.data_files.models import DataFile, ReparseFileMeta
 from tdpservice.parsers.parse import parse_datafile
-from tdpservice.parsers.models import DataFileSummary, ParserErrorCategoryChoices
+from tdpservice.parsers.models import DataFileSummary, ParserErrorCategoryChoices, ParserError
 from tdpservice.parsers.aggregates import case_aggregates_by_month, total_errors_by_month
 from tdpservice.parsers.util import log_parser_exception, make_generate_parser_error
 from tdpservice.email.helpers.data_file import send_data_submitted_email
@@ -17,8 +18,16 @@
 logger = logging.getLogger(__name__)
 
 
+def set_reparse_file_meta_model_failed_state(file_meta):
+    """Set ReparseFileMeta fields to indicate a parse failure."""
+    file_meta.finished = True
+    file_meta.success = False
+    file_meta.finished_at = timezone.now()
+    file_meta.save()
+
+
 @shared_task
-def parse(data_file_id, should_send_submission_email=True):
+def parse(data_file_id, reparse_id=None):
     """Send data file for processing."""
     # passing the data file FileField across redis was rendering non-serializable failures, doing the below lookup
     # to avoid those. I suppose good practice to not store/serializer large file contents in memory when stored in redis
@@ -27,6 +36,12 @@ def parse(data_file_id, should_send_submission_email=True):
         data_file = DataFile.objects.get(id=data_file_id)
         logger.info(f"DataFile parsing started for file {data_file.filename}")
 
+        file_meta = None
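+        # When this run is part of a reparse, per-file progress is tracked on the ReparseFileMeta through-model.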
+        if reparse_id:
+            file_meta = ReparseFileMeta.objects.get(data_file_id=data_file_id, reparse_meta_id=reparse_id)
+            file_meta.started_at = timezone.now()
+            file_meta.save()
+
         dfs = DataFileSummary.objects.create(datafile=data_file, status=DataFileSummary.Status.PENDING)
         errors = parse_datafile(data_file, dfs)
         dfs.status = dfs.get_status()
@@ -41,7 +56,18 @@ def parse(data_file_id, should_send_submission_email=True):
         logger.info(f"Parsing finished for file -> {repr(data_file)} with status "
                     f"{dfs.status} and {len(errors)} errors.")
 
-        if should_send_submission_email is True:
+        if reparse_id is not None:
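+            # Record per-file results here; ReparseMeta exposes aggregate counts as derived properties.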
+            file_meta.num_records_created = dfs.total_number_of_records_created
+            file_meta.cat_4_errors_generated = ParserError.objects.filter(
+                file_id=data_file_id,
+                error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY
+            ).count()
+            file_meta.finished = True
+            file_meta.success = True
+            file_meta.finished_at = timezone.now()
+            file_meta.save()
+            ReparseMeta.set_total_num_records_post(ReparseMeta.objects.get(pk=reparse_id))
+        else:
             recipients = User.objects.filter(
                 stt=data_file.stt,
                 account_approval_status=AccountApprovalStatusChoices.APPROVED,
@@ -54,7 +80,8 @@ def parse(data_file_id, should_send_submission_email=True):
                              f"Encountered Database exception in parser_task.py: \n{e}",
                              "error"
                              )
-        ReparseMeta.increment_files_failed(data_file.reparse_meta_models)
+        if reparse_id:
+            set_reparse_file_meta_model_failed_state(file_meta)
     except Exception as e:
         generate_error = make_generate_parser_error(data_file, None)
         error = generate_error(schema=None,
@@ -72,4 +99,5 @@ def parse(data_file_id, should_send_submission_email=True):
                              (f"Uncaught exception while parsing datafile: {data_file.pk}! Please review the logs to "
                               f"see if manual intervention is required. Exception: \n{e}"),
                              "critical")
-        ReparseMeta.increment_files_failed(data_file.reparse_meta_models)
+        if reparse_id:
+            set_reparse_file_meta_model_failed_state(file_meta)
diff --git a/tdrs-backend/tdpservice/search_indexes/admin/reparse_meta.py b/tdrs-backend/tdpservice/search_indexes/admin/reparse_meta.py
index f030501f8..4ea731475 100644
--- a/tdrs-backend/tdpservice/search_indexes/admin/reparse_meta.py
+++ b/tdrs-backend/tdpservice/search_indexes/admin/reparse_meta.py
@@ -1,4 +1,4 @@
-"""ModelAdmin classes for parsed SSP data files."""
+"""ModelAdmin class for the ReparseMeta model."""
 from .mixins import ReadOnlyAdminMixin
 from tdpservice.data_files.admin.admin import DataFileInline
 
@@ -8,18 +8,37 @@ class ReparseMetaAdmin(ReadOnlyAdminMixin):
 
     inlines = [DataFileInline]
 
+    def reparse_is_finished(self, instance):
+        """Overload instance property for ui checkboxes."""
+        return instance.is_finished
+    reparse_is_finished.boolean = True
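+    # Django admin reads the .boolean attribute to render check/cross icons for computed columns.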
+
+    def reparse_is_success(self, instance):
+        """Overload instance property for ui checkboxes."""
+        return instance.is_success
+    reparse_is_success.boolean = True
+
     list_display = [
         'id',
         'created_at',
         'timeout_at',
-        'success',
-        'finished',
+        'reparse_is_finished',
+        'reparse_is_success',
         'db_backup_location',
     ]
 
     list_filter = [
-        'success',
-        'finished',
         'fiscal_year',
         'fiscal_quarter',
     ]
+
+    readonly_fields = [
+        'reparse_is_finished',
+        'reparse_is_success',
+        'finished_at',
+        'num_files',
+        'num_files_completed',
+        'num_files_succeeded',
+        'num_files_failed',
+        'num_records_created',
+    ]
diff --git a/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py b/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py
index d0c7a9934..48d4cf3fe 100644
--- a/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py
+++ b/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py
@@ -182,9 +182,9 @@ def _handle_datafiles(self, files, meta_model, log_context):
         """Delete, re-save, and reparse selected datafiles."""
         for file in files:
             try:
-                file.reparse_meta_models.add(meta_model)
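+                # add() auto-creates the ReparseFileMeta through-row; the parse task updates it via reparse_id.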
+                file.reparses.add(meta_model)
                 file.save()
-                parser_task.parse.delay(file.pk, should_send_submission_email=False)
+                parser_task.parse.delay(file.pk, reparse_id=meta_model.pk)
             except DatabaseError as e:
                 log('Encountered a DatabaseError while re-creating datafiles. The database '
                     'and Elastic are INCONSISTENT! Restore the DB from the backup as soon as possible!',
@@ -341,8 +341,7 @@ def handle(self, *args, **options):
                                                 fiscal_year=fiscal_year,
                                                 all=reparse_all,
                                                 new_indices=new_indices,
-                                                delete_old_indices=new_indices,
-                                                num_files_to_reparse=num_files)
+                                                delete_old_indices=new_indices)
 
         # Backup the Postgres DB
         backup_file_name += f"_rpv{meta_model.pk}.pg"
diff --git a/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py b/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py
index a531ae558..c14a302a1 100644
--- a/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py
+++ b/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py
@@ -31,7 +31,7 @@ def __get_log_context(self):
 
     def __get_index_suffix(self):
         meta_model = ReparseMeta.get_latest()
-        if meta_model is not None and not meta_model.finished:
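+        # An in-progress reparse reuses its pk as the index suffix so all of its files share the same indices.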
+        if meta_model is not None and not meta_model.is_finished:
             return f"_rpv{meta_model.pk}"
         fmt = "%Y-%m-%d_%H.%M.%S"
         return f"_{datetime.now().strftime(fmt)}"
diff --git a/tdrs-backend/tdpservice/search_indexes/migrations/0032_auto_20241008_1745.py b/tdrs-backend/tdpservice/search_indexes/migrations/0032_auto_20241008_1745.py
new file mode 100644
index 000000000..4724f0a3f
--- /dev/null
+++ b/tdrs-backend/tdpservice/search_indexes/migrations/0032_auto_20241008_1745.py
@@ -0,0 +1,37 @@
+# Generated by Django 3.2.15 on 2024-10-08 17:45
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('search_indexes', '0031_alter_tribal_tanf_t4_closure_reason'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='files_completed',
+        ),
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='files_failed',
+        ),
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='finished',
+        ),
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='num_files_to_reparse',
+        ),
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='num_records_created',
+        ),
+        migrations.RemoveField(
+            model_name='reparsemeta',
+            name='success',
+        ),
+    ]
diff --git a/tdrs-backend/tdpservice/search_indexes/models/reparse_meta.py b/tdrs-backend/tdpservice/search_indexes/models/reparse_meta.py
index ddbf4ce4a..a12d7b5b8 100644
--- a/tdrs-backend/tdpservice/search_indexes/models/reparse_meta.py
+++ b/tdrs-backend/tdpservice/search_indexes/models/reparse_meta.py
@@ -1,7 +1,6 @@
 """Meta data model for tracking reparsed files."""
 
-from django.db import models, transaction
-from django.db.utils import DatabaseError
+from django.db import models
 from django.db.models import Max
 from tdpservice.search_indexes.util import count_all_records
 import logging
@@ -25,28 +24,66 @@ class Meta:
     created_at = models.DateTimeField(auto_now_add=True)
     timeout_at = models.DateTimeField(auto_now_add=False, null=True)
 
-    finished = models.BooleanField(default=False)
-    success = models.BooleanField(default=False, help_text="All files completed parsing.")
-
-    num_files_to_reparse = models.PositiveIntegerField(default=0)
-    files_completed = models.PositiveIntegerField(default=0)
-    files_failed = models.PositiveIntegerField(default=0)
-
     num_records_deleted = models.PositiveIntegerField(default=0)
-    num_records_created = models.PositiveIntegerField(default=0)
-
     total_num_records_initial = models.PositiveBigIntegerField(default=0)
     total_num_records_post = models.PositiveBigIntegerField(default=0)
 
     db_backup_location = models.CharField(max_length=512)
 
-    # Options used to select the files to reparse
+    # Options used to select the files to reparse (set only by the management command; remove if that command is deprecated)
     fiscal_quarter = models.CharField(max_length=2, null=True)
     fiscal_year = models.PositiveIntegerField(null=True)
     all = models.BooleanField(default=False)
     new_indices = models.BooleanField(default=False)
     delete_old_indices = models.BooleanField(default=False)
 
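+    # Aggregate reparse state below is derived from ReparseFileMeta rows rather than stored counters.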
+    @property
+    def is_finished(self):
+        """Return True if all associated ReparseFileMeta objects are finished."""
+        if self.num_files > 0:
+            return all([r.finished for r in self.reparse_file_metas.all()])
+        return False
+
+    @property
+    def is_success(self):
+        """Return True if all associated ReparseFileMeta objects are successful."""
+        if self.is_finished:
+            return all([r.success for r in self.reparse_file_metas.all()])
+        return False
+
+    @property
+    def finished_at(self):
+        """Return the finished_at timestamp of the last ReparseFileMeta object."""
+        last_parse = self.reparse_file_metas.order_by('-finished_at').first()
+        return last_parse.finished_at if last_parse else None
+
+    @property
+    def num_files(self):
+        """Return the number of associated ReparseFileMeta objects."""
+        return self.reparse_file_metas.count()
+
+    @property
+    def num_files_completed(self):
+        """Return the number of completed ReparseFileMeta objects."""
+        return self.reparse_file_metas.filter(finished=True).count()
+
+    @property
+    def num_files_succeeded(self):
+        """Return the number of successful ReparseFileMeta objects."""
+        return self.reparse_file_metas.filter(finished=True, success=True).count()
+
+    @property
+    def num_files_failed(self):
+        """Return the number of failed ReparseFileMeta objects."""
+        return self.reparse_file_metas.filter(finished=True, success=False).count()
+
+    @property
+    def num_records_created(self):
+        """Return the sum of records created for all associated ReparseFileMeta objects."""
+        return sum([r.num_records_created for r in self.reparse_file_metas.all()])
+
+    # TODO: remove the remaining static methods below or move them to utility functions in their own app (or open a cleanup ticket).
+
     @staticmethod
     def file_counts_match(meta_model):
         """
@@ -56,11 +93,10 @@ def file_counts_match(meta_model):
         containing this model has not been locked the caller will experience race issues.
         """
         print("\n\nINSIDE FILE COUNTS MATCH:")
-        print(f"{meta_model.num_files_to_reparse }, {meta_model.files_completed}, {meta_model.files_failed}\n\n")
-        return (meta_model.files_completed == meta_model.num_files_to_reparse or
-                meta_model.files_completed + meta_model.files_failed ==
-                meta_model.num_files_to_reparse or
-                meta_model.files_failed == meta_model.num_files_to_reparse)
+        print(f"{meta_model.num_files }, {meta_model.num_files_completed}, {meta_model.num_files_failed}\n\n")
+        return (meta_model.num_files_completed == meta_model.num_files or
+                meta_model.num_files_completed + meta_model.num_files_failed ==
+                meta_model.num_files or meta_model.num_files_failed == meta_model.num_files)
 
     @staticmethod
     def assert_all_files_done(meta_model):
@@ -70,84 +106,10 @@ def assert_all_files_done(meta_model):
         This function assumes the meta_model has been passed in a distributed/thread safe way. If the database row
         containing this model has not been locked the caller will experience race issues.
         """
-        if meta_model.finished and ReparseMeta.file_counts_match(meta_model):
+        if meta_model.is_finished and ReparseMeta.file_counts_match(meta_model):
             return True
         return False
 
-    @staticmethod
-    def set_reparse_finished(meta_model):
-        """
-        Set status/completion fields to appropriate values.
-
-        This function assumes the meta_model has been passed in a distributed/thread safe way. If the database row
-        containing this model has not been locked the caller will experience race issues.
-        """
-        meta_model.finished = True
-        meta_model.success = meta_model.files_completed == meta_model.num_files_to_reparse
-        meta_model.total_num_records_post = count_all_records()
-        meta_model.save()
-
-    @staticmethod
-    def increment_files_completed(reparse_meta_models):
-        """
-        Increment the count of files that have completed parsing for the datafile's current/latest reparse model.
-
-        Because this function can be called in parallel we use `select_for_update` because multiple parse tasks can
-        referrence the same ReparseMeta object that is being queried below. `select_for_update` provides a DB lock on
-        the object and forces other transactions on the object to wait until this one completes.
-        """
-        if reparse_meta_models.exists():
-            with transaction.atomic():
-                try:
-                    meta_model = reparse_meta_models.select_for_update().latest("pk")
-                    meta_model.files_completed += 1
-                    if ReparseMeta.file_counts_match(meta_model):
-                        ReparseMeta.set_reparse_finished(meta_model)
-                    meta_model.save()
-                except DatabaseError:
-                    logger.exception("Encountered exception while trying to update the `files_reparsed` field on the "
-                                     f"ReparseMeta object with ID: {meta_model.pk}.")
-
-    @staticmethod
-    def increment_files_failed(reparse_meta_models):
-        """
-        Increment the count of files that failed parsing for the datafile's current/latest reparse meta model.
-
-        Because this function can be called in parallel we use `select_for_update` because multiple parse tasks can
-        referrence the same ReparseMeta object that is being queried below. `select_for_update` provides a DB lock on
-        the object and forces other transactions on the object to wait until this one completes.
-        """
-        if reparse_meta_models.exists():
-            with transaction.atomic():
-                try:
-                    meta_model = reparse_meta_models.select_for_update().latest("pk")
-                    meta_model.files_failed += 1
-                    if ReparseMeta.file_counts_match(meta_model):
-                        ReparseMeta.set_reparse_finished(meta_model)
-                    meta_model.save()
-                except DatabaseError:
-                    logger.exception("Encountered exception while trying to update the `files_failed` field on the "
-                                     f"ReparseMeta object with ID: {meta_model.pk}.")
-
-    @staticmethod
-    def increment_records_created(reparse_meta_models, num_created):
-        """
-        Increment the count of records created for the datafile's current/latest reparse meta model.
-
-        Because this function can be called in parallel we use `select_for_update` because multiple parse tasks can
-        referrence the same ReparseMeta object that is being queried below. `select_for_update` provides a DB lock on
-        the object and forces other transactions on the object to wait until this one completes.
-        """
-        if reparse_meta_models.exists():
-            with transaction.atomic():
-                try:
-                    meta_model = reparse_meta_models.select_for_update().latest("pk")
-                    meta_model.num_records_created += num_created
-                    meta_model.save()
-                except DatabaseError:
-                    logger.exception("Encountered exception while trying to update the `files_failed` field on the "
-                                     f"ReparseMeta object with ID: {meta_model.pk}.")
-
     @staticmethod
     def get_latest():
         """Get the ReparseMeta model with the greatest pk."""
@@ -155,3 +117,10 @@ def get_latest():
         if max_pk.get("pk__max", None) is None:
             return None
         return ReparseMeta.objects.get(pk=max_pk["pk__max"])
+
+    @staticmethod
+    def set_total_num_records_post(meta_model):
+        """Update the total_num_records_post field once reparse has completed."""
+        if meta_model.is_finished:
+            meta_model.total_num_records_post = count_all_records()
+            meta_model.save()
diff --git a/tdrs-backend/tdpservice/search_indexes/test/test_reparse.py b/tdrs-backend/tdpservice/search_indexes/test/test_reparse.py
index 2c8647cea..54d49aedb 100644
--- a/tdrs-backend/tdpservice/search_indexes/test/test_reparse.py
+++ b/tdrs-backend/tdpservice/search_indexes/test/test_reparse.py
@@ -6,6 +6,7 @@
 from tdpservice.search_indexes.management.commands import clean_and_reparse
 from tdpservice.search_indexes.models.reparse_meta import ReparseMeta
 from tdpservice.users.models import User
+from tdpservice.data_files.models import ReparseFileMeta
 
 from django.contrib.admin.models import LogEntry, ADDITION
 from django.db.utils import DatabaseError
@@ -265,7 +266,7 @@ def test_reparse_dunce():
     assert ReparseMeta.objects.count() == 0
 
 @pytest.mark.django_db
-def test_reparse_sequential(log_context):
+def test_reparse_sequential(log_context, big_file):
     """Test reparse _assert_sequential_execution."""
     cmd = clean_and_reparse.Command()
     assert True is cmd._assert_sequential_execution(log_context)
@@ -278,6 +279,7 @@ def test_reparse_sequential(log_context):
         "safely execute reparse, please fix manually."
     )
 
+    big_file.reparses.add(meta)
     meta.timeout_at = timezone.now() + timedelta(seconds=100)
     meta.save()
     assert False is cmd._assert_sequential_execution(log_context)
@@ -287,6 +289,7 @@ def test_reparse_sequential(log_context):
 
     meta.timeout_at = timezone.now()
     meta.save()
+
     assert True is cmd._assert_sequential_execution(log_context)
     timeout_entry = LogEntry.objects.latest('pk')
     assert timeout_entry.change_message == ("Previous reparse has exceeded the timeout. Allowing "
@@ -308,7 +311,7 @@ def test_reparse_quarter_and_year(mocker, dfs, cat4_edge_case_file, big_file, sm
     cmd.handle(**opts)
 
     latest = ReparseMeta.objects.select_for_update().latest("pk")
-    assert latest.num_files_to_reparse == 1
+    assert latest.num_files == 1
     assert latest.num_records_deleted == 3073
 
 @pytest.mark.django_db()
@@ -327,7 +330,7 @@ def test_reparse_quarter(mocker, dfs, cat4_edge_case_file, big_file, small_ssp_s
     cmd.handle(**opts)
 
     latest = ReparseMeta.objects.select_for_update().latest("pk")
-    assert latest.num_files_to_reparse == 4
+    assert latest.num_files == 4
     assert latest.num_records_deleted == 3104
 
 @pytest.mark.django_db()
@@ -346,7 +349,7 @@ def test_reparse_year(mocker, dfs, cat4_edge_case_file, big_file, small_ssp_sect
     cmd.handle(**opts)
 
     latest = ReparseMeta.objects.select_for_update().latest("pk")
-    assert latest.num_files_to_reparse == 2
+    assert latest.num_files == 2
     assert latest.num_records_deleted == 27
 
 @pytest.mark.django_db()
@@ -365,7 +368,7 @@ def test_reparse_all(mocker, dfs, cat4_edge_case_file, big_file, small_ssp_secti
     cmd.handle(**opts)
 
     latest = ReparseMeta.objects.select_for_update().latest("pk")
-    assert latest.num_files_to_reparse == 4
+    assert latest.num_files == 4
     assert latest.num_records_deleted == 3104
 
 @pytest.mark.django_db()
@@ -387,97 +390,85 @@ def test_reparse_no_files(mocker):
                                                             "Quarter: Q1-4. Nothing to do.")
 
 @pytest.mark.django_db()
-def test_mm_all_files_done():
+def test_mm_all_files_done(big_file):
     """Test meta model all files done."""
     meta_model = ReparseMeta.objects.create()
+    big_file.reparses.add(meta_model)
     assert ReparseMeta.assert_all_files_done(meta_model) is False
 
-    meta_model.finished = True
-    meta_model.files_completed = 1
-    meta_model.num_files_to_reparse = 1
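+    # Simulate the parser task finishing by flipping the through-model row directly.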
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.save()
     assert ReparseMeta.assert_all_files_done(meta_model) is True
 
 @pytest.mark.django_db()
-def test_mm_increment_files_completed(big_file):
-    """Test meta model increment files completed."""
+def test_mm_files_completed(big_file):
+    """Test meta model files completed count."""
-    meta_model = ReparseMeta.objects.create(num_files_to_reparse=2, all=True)
-    big_file.reparse_meta_models.add(meta_model)
+    meta_model = ReparseMeta.objects.create(all=True)
+    big_file.reparses.add(meta_model)
     big_file.save()
 
-    ReparseMeta.increment_files_completed(big_file.reparse_meta_models)
     meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is False
-    assert meta_model.files_completed == 1
-    assert meta_model.files_failed == 0
+    assert meta_model.is_finished is False
+    assert meta_model.num_files == 1
+    assert meta_model.num_files_completed == 0
+    assert meta_model.num_files_failed == 0
+    assert ReparseMeta.assert_all_files_done(meta_model) is False
 
-    ReparseMeta.increment_files_completed(big_file.reparse_meta_models)
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.success = True
+    fm.save()
     meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is True
-    assert meta_model.files_completed == 2
-    assert meta_model.files_failed == 0
+    assert meta_model.is_finished is True
+    assert meta_model.num_files == 1
+    assert meta_model.num_files_completed == 1
+    assert meta_model.num_files_failed == 0
 
-    assert meta_model.success is True
+    assert meta_model.is_success is True
 
     assert ReparseMeta.assert_all_files_done(meta_model) is True
 
 @pytest.mark.django_db()
-def test_mm_increment_files_failed(big_file):
-    """Test meta model increment files failed."""
+def test_mm_files_failed(big_file):
+    """Test meta model files failed count."""
-    meta_model = ReparseMeta.objects.create(num_files_to_reparse=2, all=True)
-    big_file.reparse_meta_models.add(meta_model)
-    big_file.save()
-
-    ReparseMeta.increment_files_failed(big_file.reparse_meta_models)
-    meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is False
-    assert meta_model.files_completed == 0
-    assert meta_model.files_failed == 1
-
-    ReparseMeta.increment_files_failed(big_file.reparse_meta_models)
-    meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is True
-    assert meta_model.files_completed == 0
-    assert meta_model.files_failed == 2
-
-    assert meta_model.success is False
-
-    assert ReparseMeta.assert_all_files_done(meta_model) is True
-
-@pytest.mark.django_db()
-def test_mm_increment_files_failed_and_passed(big_file):
-    """Test meta model both increment failed and passed files."""
-    meta_model = ReparseMeta.objects.create(num_files_to_reparse=2, all=True)
-    big_file.reparse_meta_models.add(meta_model)
+    meta_model = ReparseMeta.objects.create(all=True)
+    big_file.reparses.add(meta_model)
     big_file.save()
 
-    ReparseMeta.increment_files_completed(big_file.reparse_meta_models)
     meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is False
-    assert meta_model.files_completed == 1
-    assert meta_model.files_failed == 0
+    assert meta_model.is_finished is False
+    assert meta_model.num_files_completed == 0
+    assert meta_model.num_files_failed == 0
+    assert ReparseMeta.assert_all_files_done(meta_model) is False
 
-    ReparseMeta.increment_files_failed(big_file.reparse_meta_models)
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.save()
     meta_model = ReparseMeta.get_latest()
-    assert meta_model.finished is True
-    assert meta_model.files_completed == 1
-    assert meta_model.files_failed == 1
+    assert meta_model.is_finished is True
+    assert meta_model.num_files_completed == 1
+    assert meta_model.num_files_failed == 1
 
-    assert meta_model.success is False
+    assert meta_model.is_success is False
 
     assert ReparseMeta.assert_all_files_done(meta_model) is True
 
 @pytest.mark.django_db()
-def test_mm_increment_records_created(big_file):
-    """Test meta model increment records created."""
+def test_mm_num_records_created(big_file):
+    """Test meta model num_records_created aggregation."""
-    meta_model = ReparseMeta.objects.create(num_files_to_reparse=2, all=True)
-    big_file.reparse_meta_models.add(meta_model)
+    meta_model = ReparseMeta.objects.create(all=True)
+    big_file.reparses.add(meta_model)
     big_file.save()
 
-    ReparseMeta.increment_records_created(big_file.reparse_meta_models, 500)
     meta_model = ReparseMeta.get_latest()
-    assert meta_model.num_records_created == 500
+    assert meta_model.num_records_created == 0
 
-    ReparseMeta.increment_records_created(big_file.reparse_meta_models, 888)
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.success = True
+    fm.num_records_created = 1388
+    fm.save()
     meta_model = ReparseMeta.get_latest()
     assert meta_model.num_records_created == 1388
 
@@ -492,18 +483,37 @@ def test_mm_get_latest():
     assert ReparseMeta.get_latest() != meta1
 
 @pytest.mark.django_db()
-def test_mm_file_counts_match():
+def test_mm_file_counts_match(big_file):
     """Test meta model file counts match."""
-    meta_model = ReparseMeta.objects.create(num_files_to_reparse=2)
+    meta_model = ReparseMeta.objects.create()
+    big_file.reparses.add(meta_model)
+    big_file.save()
     assert ReparseMeta.file_counts_match(meta_model) is False
 
-    meta_model.files_completed = 2
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.save()
     assert ReparseMeta.file_counts_match(meta_model) is True
 
-    meta_model.files_completed = 0
-    meta_model.files_failed = 2
-    assert ReparseMeta.file_counts_match(meta_model) is True
+@pytest.mark.django_db()
+def test_reparse_finished_success_false_before_file_queue(big_file):
+    """Test is_finished and is_success are False if no files added."""
+    meta_model = ReparseMeta.objects.create()
+    assert meta_model.is_finished is False
+    assert meta_model.is_success is False
 
-    meta_model.files_completed = 1
-    meta_model.files_failed = 1
-    assert ReparseMeta.file_counts_match(meta_model) is True
+    big_file.reparses.add(meta_model)
+    big_file.save()
+    assert meta_model.is_finished is False
+    assert meta_model.is_success is False
+
+    fm = ReparseFileMeta.objects.get(data_file_id=big_file.pk, reparse_meta_id=meta_model.pk)
+    fm.finished = True
+    fm.save()
+    assert meta_model.is_finished is True
+    assert meta_model.is_success is False
+
+    fm.success = True
+    fm.save()
+    assert meta_model.is_finished is True
+    assert meta_model.is_success is True
diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py
index ba936b545..6f4e35353 100644
--- a/tdrs-backend/tdpservice/settings/common.py
+++ b/tdrs-backend/tdpservice/settings/common.py
@@ -281,10 +281,11 @@ class Common(Configuration):
     )
 
     # Sessions
-    SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies"
+    SESSION_ENGINE = "tdpservice.core.custom_session_engine"
+    SIGNED_COOKIE_EXPIRES = 60 * 60 * 12  # 12 hours
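+    # SESSION_SAVE_EVERY_REQUEST (below) refreshes the 30-minute cookie on every request, giving a
+    # sliding window; the 12-hour SIGNED_COOKIE_EXPIRES is presumably an absolute cap enforced by the custom engine.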
     SESSION_COOKIE_HTTPONLY = True
-    SESSION_EXPIRE_AT_BROWSER_CLOSE = True
-    SESSION_COOKIE_AGE = 15 * 60  # 15 minutes
+    SESSION_SAVE_EVERY_REQUEST = True
+    SESSION_COOKIE_AGE = 60 * 30  # 30 minutes
     # The CSRF token Cookie holds no security benefits when confined to HttpOnly.
     # Setting this to false to allow the frontend to include it in the header
     # of API POST calls to prevent false negative authorization errors.
diff --git a/tdrs-backend/tdpservice/users/test/test_permissions.py b/tdrs-backend/tdpservice/users/test/test_permissions.py
index ae53b3cda..f1b3847ad 100644
--- a/tdrs-backend/tdpservice/users/test/test_permissions.py
+++ b/tdrs-backend/tdpservice/users/test/test_permissions.py
@@ -159,6 +159,9 @@ def test_ofa_system_admin_permissions(ofa_system_admin):
         'search_indexes.add_reparsemeta',
         'search_indexes.view_reparsemeta',
         'search_indexes.change_reparsemeta',
+        'data_files.add_reparsefilemeta',
+        'data_files.view_reparsefilemeta',
+        'data_files.change_reparsefilemeta',
     }
     group_permissions = ofa_system_admin.get_group_permissions()
     assert group_permissions == expected_permissions
diff --git a/tdrs-frontend/src/components/Reports/Reports.jsx b/tdrs-frontend/src/components/Reports/Reports.jsx
index 82e161e37..a22ae4fb1 100644
--- a/tdrs-frontend/src/components/Reports/Reports.jsx
+++ b/tdrs-frontend/src/components/Reports/Reports.jsx
@@ -244,140 +244,187 @@ function Reports() {
           </div>
         )}
         <form>
-          {(isOFAAdmin || isDIGITTeam || isSystemAdmin) && (
-            <div
-              className={classNames('usa-form-group maxw-mobile margin-top-4', {
-                'usa-form-group--error': formValidation.stt,
-              })}
-            >
-              <STTComboBox
-                selectedStt={sttInputValue}
-                selectStt={selectStt}
-                error={formValidation.stt}
-              />
-            </div>
-          )}
-          {(stt?.ssp ? stt.ssp : false) && (
-            <div className="usa-form-group margin-top-4">
-              <fieldset className="usa-fieldset">
-                <legend className="usa-label text-bold">File Type</legend>
-                <div className="usa-radio">
-                  <input
-                    className="usa-radio__input"
-                    id="tanf"
-                    type="radio"
-                    name="reportType"
-                    value="tanf"
-                    defaultChecked
-                    onChange={() => setFileTypeInputValue('tanf')}
-                  />
-                  <label className="usa-radio__label" htmlFor="tanf">
-                    TANF
-                  </label>
-                </div>
-                <div className="usa-radio">
-                  <input
-                    className="usa-radio__input"
-                    id="ssp-moe"
-                    type="radio"
-                    name="reportType"
-                    value="ssp-moe"
-                    onChange={() => setFileTypeInputValue('ssp-moe')}
+          <div className="grid-row grid-gap">
+            <div className="mobile:grid-container desktop:padding-0 desktop:grid-col-fill">
+              {(isOFAAdmin || isDIGITTeam || isSystemAdmin) && (
+                <div
+                  className={classNames(
+                    'usa-form-group maxw-mobile margin-top-4',
+                    {
+                      'usa-form-group--error': formValidation.stt,
+                    }
+                  )}
+                >
+                  <STTComboBox
+                    selectedStt={sttInputValue}
+                    selectStt={selectStt}
+                    error={formValidation.stt}
                   />
-                  <label className="usa-radio__label" htmlFor="ssp-moe">
-                    SSP-MOE
-                  </label>
-                </div>
-              </fieldset>
-            </div>
-          )}
-          <div
-            className={classNames('usa-form-group maxw-mobile margin-top-4', {
-              'usa-form-group--error': formValidation.year,
-            })}
-          >
-            <label
-              className="usa-label text-bold margin-top-4"
-              htmlFor="reportingYears"
-            >
-              Fiscal Year (October - September)
-              {formValidation.year && (
-                <div className="usa-error-message" id="years-error-alert">
-                  A fiscal year is required
                 </div>
               )}
-              {/* eslint-disable-next-line */}
-              <select
-                className={classNames('usa-select maxw-mobile', {
-                  'usa-combo-box__input--error': formValidation.year,
-                })}
-                name="reportingYears"
-                id="reportingYears"
-                onChange={selectYear}
-                value={yearInputValue}
-                aria-describedby="years-error-alert"
-              >
-                <option value="" disabled hidden>
-                  - Select Fiscal Year -
-                </option>
-                {constructYearOptions()}
-              </select>
-            </label>
-          </div>
-          <div
-            className={classNames('usa-form-group maxw-mobile margin-top-4', {
-              'usa-form-group--error': formValidation.quarter,
-            })}
-          >
-            <label
-              className="usa-label text-bold margin-top-4"
-              htmlFor="quarter"
-            >
-              Quarter
-              {formValidation.quarter && (
-                <div className="usa-error-message" id="quarter-error-alert">
-                  A quarter is required
+              {(stt?.ssp ? stt.ssp : false) && (
+                <div className="usa-form-group margin-top-4">
+                  <fieldset className="usa-fieldset">
+                    <legend className="usa-label text-bold">File Type</legend>
+                    <div className="usa-radio">
+                      <input
+                        className="usa-radio__input"
+                        id="tanf"
+                        type="radio"
+                        name="reportType"
+                        value="tanf"
+                        defaultChecked
+                        onChange={() => setFileTypeInputValue('tanf')}
+                      />
+                      <label className="usa-radio__label" htmlFor="tanf">
+                        TANF
+                      </label>
+                    </div>
+                    <div className="usa-radio">
+                      <input
+                        className="usa-radio__input"
+                        id="ssp-moe"
+                        type="radio"
+                        name="reportType"
+                        value="ssp-moe"
+                        onChange={() => setFileTypeInputValue('ssp-moe')}
+                      />
+                      <label className="usa-radio__label" htmlFor="ssp-moe">
+                        SSP-MOE
+                      </label>
+                    </div>
+                  </fieldset>
                 </div>
               )}
-              {/* eslint-disable-next-line */}
-              <select
-                className={classNames('usa-select maxw-mobile', {
-                  'usa-combo-box__input--error': formValidation.quarter,
-                })}
-                name="quarter"
-                id="quarter"
-                onChange={selectQuarter}
-                value={quarterInputValue}
-                aria-describedby="quarter-error-alert"
+            </div>
+          </div>
+          <div className="grid-row grid-gap">
+            <div className="mobile:grid-container desktop:padding-0 desktop:grid-col-auto">
+              <div
+                className={classNames(
+                  'usa-form-group maxw-mobile margin-top-4',
+                  {
+                    'usa-form-group--error': formValidation.year,
+                  }
+                )}
               >
-                <option value="" disabled hidden>
-                  - Select Quarter -
-                </option>
-                {Object.entries(quarters).map(
-                  ([quarter, quarterDescription]) => (
-                    <option value={quarter} key={quarter}>
-                      {quarterDescription}
+                <label
+                  className="usa-label text-bold margin-top-4"
+                  htmlFor="reportingYears"
+                >
+                  Fiscal Year (October - September)
+                  {formValidation.year && (
+                    <div className="usa-error-message" id="years-error-alert">
+                      A fiscal year is required
+                    </div>
+                  )}
+                  {/* eslint-disable-next-line */}
+              <select
+                    className={classNames('usa-select maxw-mobile', {
+                      'usa-combo-box__input--error': formValidation.year,
+                    })}
+                    name="reportingYears"
+                    id="reportingYears"
+                    onChange={selectYear}
+                    value={yearInputValue}
+                    aria-describedby="years-error-alert"
+                  >
+                    <option value="" disabled hidden>
+                      - Select Fiscal Year -
                     </option>
-                  )
+                    {constructYearOptions()}
+                  </select>
+                </label>
+              </div>
+              <div
+                className={classNames(
+                  'usa-form-group maxw-mobile margin-top-4',
+                  {
+                    'usa-form-group--error': formValidation.quarter,
+                  }
                 )}
-              </select>
-            </label>
+              >
+                <label
+                  className="usa-label text-bold margin-top-4"
+                  htmlFor="quarter"
+                >
+                  Quarter
+                  {formValidation.quarter && (
+                    <div className="usa-error-message" id="quarter-error-alert">
+                      A quarter is required
+                    </div>
+                  )}
+                  {/* eslint-disable-next-line */}
+              <select
+                    className={classNames('usa-select maxw-mobile', {
+                      'usa-combo-box__input--error': formValidation.quarter,
+                    })}
+                    name="quarter"
+                    id="quarter"
+                    onChange={selectQuarter}
+                    value={quarterInputValue}
+                    aria-describedby="quarter-error-alert"
+                  >
+                    <option value="" disabled hidden>
+                      - Select Quarter -
+                    </option>
+                    {Object.entries(quarters).map(
+                      ([quarter, quarterDescription]) => (
+                        <option value={quarter} key={quarter}>
+                          {quarterDescription}
+                        </option>
+                      )
+                    )}
+                  </select>
+                </label>
+              </div>
+              <Button
+                className="margin-y-4"
+                type="button"
+                onClick={() => {
+                  if (uploadedFiles && uploadedFiles.length > 0) {
+                    setErrorModalVisible(true)
+                  } else {
+                    handleSearch()
+                  }
+                }}
+              >
+                Search
+              </Button>
+            </div>
+            <div className="mobile:grid-container desktop:padding-0 desktop:grid-col-fill">
+              <table className="usa-table usa-table--striped margin-top-4 desktop:width-mobile-lg mobile:width-full">
+                <caption>Identifying the right Fiscal Year and Quarter</caption>
+                <thead>
+                  <tr>
+                    <th>Fiscal Quarter</th>
+                    <th>Calendar Period</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  <tr>
+                    <td>Quarter 1</td>
+                    <td>Oct 1 - Dec 31</td>
+                  </tr>
+                  <tr>
+                    <td>Quarter 2</td>
+                    <td>Jan 1 - Mar 31</td>
+                  </tr>
+                  <tr>
+                    <td>Quarter 3</td>
+                    <td>Apr 1 - Jun 30</td>
+                  </tr>
+                  <tr>
+                    <td>Quarter 4</td>
+                    <td>Jul 1 - Sep 30</td>
+                  </tr>
+                </tbody>
+              </table>
+            </div>
           </div>
-          <Button
-            className="margin-y-4"
-            type="button"
-            onClick={() => {
-              if (uploadedFiles && uploadedFiles.length > 0) {
-                setErrorModalVisible(true)
-              } else {
-                handleSearch()
-              }
-            }}
-          >
-            Search
-          </Button>
         </form>
       </div>
+
       {isUploadReportToggled && (
         <>
           <h2