
Commit

Merge branch 'develop' into feature/2729-remote-migrations-2
jtimpe committed Feb 26, 2024
2 parents 6fa9cab + 0619329 commit 001f641
Showing 24 changed files with 336 additions and 823 deletions.
12 changes: 8 additions & 4 deletions .circleci/owasp/jobs.yml
@@ -56,6 +56,9 @@
cf_space:
type: string
default: tanf-staging
cf_org:
type: string
default: "CF_ORG"
target_env:
type: enum
enum: [ "staging", "develop", "prod" ]
@@ -64,6 +67,11 @@
- sudo-check
- cf-check
- docker-compose-check
- login-cloud-dot-gov:
cf-password: <<parameters.cf_password>>
cf-space: <<parameters.cf_space>>
cf-org: <<parameters.cf_org>>
cf-username: <<parameters.cf_username>>
- run-owasp-scan:
environment: nightly
target: backend
Expand All @@ -72,10 +80,6 @@
environment: nightly
target: frontend
target_env: <<parameters.target_env>>
- login-cloud-dot-gov:
cf-password: <<parameters.cf_password>>
cf-space: <<parameters.cf_space>>
cf-username: <<parameters.cf_username>>
- run:
name: Run post-processing task to record OWASP ZAP results
command: |
91 changes: 0 additions & 91 deletions tdrs-backend/docs/api/set_profile.md

This file was deleted.

9 changes: 9 additions & 0 deletions tdrs-backend/gunicorn_start.sh
@@ -11,6 +11,15 @@ else
( cd /home/vcap/deps/0/bin/; ./redis-server /home/vcap/app/redis.conf &)
fi

# Collect static files. This is needed for swagger to work in local environment
if [[ $DISABLE_COLLECTSTATIC ]]; then
echo "DISABLE_COLLECTSTATIC is set to true, skipping collectstatic"
else
echo "Collecting static files"
python manage.py collectstatic --noinput
fi


celery -A tdpservice.settings worker -c 1 &
sleep 5
# TODO: Uncomment the following line to add flower service when memory limitation is resolved
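For reference, the collectstatic step added above can also be run from Python through Django's management API — a minimal sketch, assuming the project's Django settings module is importable (the "tdpservice.settings" path below is an assumption borrowed from the celery invocation in this script):

import os
import django
from django.core.management import call_command

# Assumption: DJANGO_SETTINGS_MODULE points at the project's Django settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tdpservice.settings")
django.setup()

# Programmatic equivalent of `python manage.py collectstatic --noinput`.
call_command("collectstatic", interactive=False)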
1 change: 0 additions & 1 deletion tdrs-backend/tdpservice/data_files/serializers.py
@@ -1,7 +1,6 @@
"""Serialize stt data."""
import logging
from rest_framework import serializers

from tdpservice.parsers.models import ParserError
from tdpservice.data_files.errors import ImmutabilityError
from tdpservice.data_files.models import DataFile
99 changes: 99 additions & 0 deletions tdrs-backend/tdpservice/data_files/test/test_api.py
@@ -1,9 +1,13 @@
"""Tests for DataFiles Application."""
from rest_framework import status
import pytest
import base64
import openpyxl
from django.core import mail
from tdpservice.data_files.models import DataFile
from tdpservice.users.models import AccountApprovalStatusChoices
from tdpservice.parsers import parse, util
from tdpservice.parsers.models import ParserError


@pytest.mark.usefixtures('db')
@@ -24,6 +28,11 @@ def user(self):
"""
raise NotImplementedError()

@pytest.fixture
def test_datafile(self, stt_user, stt):
"""Fixture for small_incorrect_file_cross_validator."""
return util.create_test_datafile('small_incorrect_file_cross_validator.txt', stt_user, stt)

@pytest.fixture
def api_client(self, api_client, user):
"""Provide an API client that is logged in with the specified user."""
@@ -57,6 +66,46 @@ def assert_data_file_content_matches(response, data_file_id):
data_file_file = DataFile.objects.get(id=data_file_id)
assert b''.join(response.streaming_content) == data_file_file.file.read()

@staticmethod
def assert_error_report_file_content_matches_with_friendly_names(response):
"""Assert the error report file contents match expected with friendly names."""
decoded_response = base64.b64decode(response.data['xls_report'])

# write the excel file to disk
with open('mycls.xlsx', 'wb') as f:
f.write(decoded_response)

# read the excel file from disk
wb = openpyxl.load_workbook('mycls.xlsx')
ws = wb.get_sheet_by_name('Sheet1')

COL_ERROR_MESSAGE = 5

assert ws.cell(row=1, column=1).value == "Error reporting in TDP is still in development.We'll" \
+ " be in touch when it's ready to use!For now please refer to the reports you receive via email"
assert ws.cell(row=4, column=COL_ERROR_MESSAGE).value == "if cash amount :873 validator1 passed" \
+ " then number of months 0 is not larger than 0."

@staticmethod
def assert_error_report_file_content_matches_without_friendly_names(response):
"""Assert the error report file contents match expected without friendly names."""
decoded_response = base64.b64decode(response.data['xls_report'])

# write the excel file to disk
with open('mycls.xlsx', 'wb') as f:
f.write(decoded_response)

# read the excel file from disk
wb = openpyxl.load_workbook('mycls.xlsx')
ws = wb.get_sheet_by_name('Sheet1')

COL_ERROR_MESSAGE = 5

assert ws.cell(row=1, column=1).value == "Error reporting in TDP is still in development.We'll" \
+ " be in touch when it's ready to use!For now please refer to the reports you receive via email"
assert ws.cell(row=4, column=COL_ERROR_MESSAGE).value == "if CASH_AMOUNT :873 validator1 passed" \
+ " then NBR_MONTHS 0 is not larger than 0."

@staticmethod
def assert_data_file_exists(data_file_data, version, user):
"""Confirm that a data file matching the provided data exists in the DB."""
@@ -91,6 +140,10 @@ def download_file(self, api_client, data_file_id):
"""Stream a file for download."""
return api_client.get(f"{self.root_url}{data_file_id}/download/")

def download_error_report_file(self, api_client, data_file_id):
"""Download the ParserError xlsx report."""
return api_client.get(f"{self.root_url}{data_file_id}/download_error_report/")


class TestDataFileAPIAsOfaAdmin(DataFileAPITestBase):
"""Test DataFileViewSet as an OFA Admin user."""
@@ -187,6 +240,32 @@ def test_download_data_file_file_for_own_stt(
assert response.status_code == status.HTTP_200_OK
self.assert_data_file_content_matches(response, data_file_id)

def test_download_error_report_file_for_own_stt(
self, api_client, test_datafile
):
"""Test that the error report file is downloaded as expected for a Data Analyst's set STT."""
parse.parse_datafile(test_datafile)
response = self.download_error_report_file(api_client, test_datafile.id)

assert response.status_code == status.HTTP_200_OK
self.assert_error_report_file_content_matches_with_friendly_names(response)

def test_download_error_report_file_for_own_stt_no_fields_json(
self, api_client, test_datafile
):
"""Test that the error report file is downloaded as expected when no fields_json is added to ParserErrors."""
parse.parse_datafile(test_datafile)

# remove the fields' friendly names for all parser errors
for error in ParserError.objects.all():
error.fields_json = None
error.save()

response = self.download_error_report_file(api_client, test_datafile.id)

assert response.status_code == status.HTTP_200_OK
self.assert_error_report_file_content_matches_without_friendly_names(response)

def test_download_data_file_file_rejected_for_other_stt(
self,
api_client,
@@ -207,6 +286,26 @@ def test_download_data_file_file_rejected_for_other_stt(

assert response.status_code == status.HTTP_403_FORBIDDEN

def test_download_error_report_rejected_for_other_stt(
self,
api_client,
data_file_data,
other_stt,
user
):
"""Test that the error report download is rejected when user's STT doesn't match."""
response = self.post_data_file_file(api_client, data_file_data)
data_file_id = response.data['id']

# Update the STT to something other than the user's
data_file_file = DataFile.objects.get(id=data_file_id)
data_file_file.stt = other_stt
data_file_file.save()

response = self.download_error_report_file(api_client, data_file_id)

assert response.status_code == status.HTTP_403_FORBIDDEN

def test_data_files_data_upload_ssp(
self, api_client, data_file_data,
):
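Outside the test suite, a consumer of the new download_error_report endpoint could unpack the payload the same way the assertions above do — a minimal sketch, assuming a response body shaped like the one these tests receive (a base64-encoded xlsx under the "xls_report" key); the helper name is illustrative:

import base64
import io

import openpyxl

def load_error_report(response_data):
    """Decode the base64-encoded xls_report payload into an openpyxl worksheet."""
    decoded = base64.b64decode(response_data["xls_report"])
    # Load from memory instead of writing a temporary file to disk.
    workbook = openpyxl.load_workbook(io.BytesIO(decoded))
    return workbook.active  # the report is written to a single sheet

# Example use: worksheet = load_error_report(response.data)
#              worksheet.cell(row=4, column=5).value  # error message column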
80 changes: 80 additions & 0 deletions tdrs-backend/tdpservice/data_files/util.py
@@ -0,0 +1,80 @@
"""Utility functions for DataFile views."""
import base64
from io import BytesIO
import xlsxwriter
import calendar


def get_xls_serialized_file(data):
"""Return xls file created from the error."""

def chk(x):
"""Check if fields_json is not None."""
x['fields_json'] = x['fields_json'] if x.get('fields_json', None) else {
'friendly_name': {
x['field_name']: x['field_name']
},
}
x['fields_json']['friendly_name'] = x['fields_json']['friendly_name'] if x['fields_json'].get(
'friendly_name', None) else {
x['field_name']: x['field_name']
}
if None in x['fields_json']['friendly_name'].keys():
x['fields_json']['friendly_name'].pop(None)
if None in x['fields_json']['friendly_name'].values():
x['fields_json']['friendly_name'].pop()
return x

def format_error_msg(x):
"""Format error message."""
error_msg = x['error_message']
for key, value in x['fields_json']['friendly_name'].items():
error_msg = error_msg.replace(key, value) if value else error_msg
return error_msg

row, col = 0, 0
output = BytesIO()
workbook = xlsxwriter.Workbook(output)
worksheet = workbook.add_worksheet()
report_columns = [
('case_number', lambda x: x['case_number']),
('year', lambda x: str(x['rpt_month_year'])[0:4] if x['rpt_month_year'] else None),
('month', lambda x: calendar.month_name[
int(str(x['rpt_month_year'])[4:])
] if x['rpt_month_year'] else None),
('error_type', lambda x: x['error_type']),
('error_message', lambda x: format_error_msg(chk(x))),
('item_number', lambda x: x['item_number']),
('item_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].values()])),
('internal_variable_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].keys()])),
('row_number', lambda x: x['row_number']),
('column_number', lambda x: x['column_number'])
]

# write beta banner
worksheet.write(row, col,
"Error reporting in TDP is still in development." +
"We'll be in touch when it's ready to use!" +
"For now please refer to the reports you receive via email")
row, col = 2, 0
# write csv header
bold = workbook.add_format({'bold': True})

def format_header(header_list: list):
"""Format header."""
return ' '.join([i.capitalize() for i in header_list.split('_')])

# We will write the headers in the first row
[worksheet.write(row, col, format_header(key[0]), bold) for col, key in enumerate(report_columns)]

[
worksheet.write(row + 3, col, key[1](data_i)) for col, key in enumerate(report_columns)
for row, data_i in enumerate(data)
]

# autofit all columns except for the first one
worksheet.autofit()
worksheet.set_column(0, 0, 20)

workbook.close()
return {"data": data, "xls_report": base64.b64encode(output.getvalue()).decode("utf-8")}
