Skip to content

Commit

Permalink
Merge branch 'develop' into doc/3199-monitoring-adr
Browse files Browse the repository at this point in the history
  • Loading branch information
andrew-jameson authored Oct 2, 2024
2 parents 04a399c + fd5cc00 commit e5b4df9
Show file tree
Hide file tree
Showing 24 changed files with 913 additions and 191 deletions.
11 changes: 8 additions & 3 deletions Taskfile.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@ version: '3'

tasks:

upload-kibana-objs:
desc: Upload dashboards to Kibana server
cmds:
- curl -X POST localhost:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form file=@tdrs-backend/tdpservice/search_indexes/kibana_saved_objs.ndjson

create-network:
desc: Create the external network
cmds:
Expand Down Expand Up @@ -29,7 +34,7 @@ tasks:
desc: Create Sentry service
dir: sentry
cmds:
# limiting the memory to 2GB and CPU to only one cpu @0, for faster response, you can remove the limittask : --cpuset-cpus 0
# limiting the memory to 2GB and CPU to only one cpu @0, for faster response, you can remove the limittask : --cpuset-cpus 0
- (docker run --privileged -p 9001:9000 -d --memory="8g" --memory-swap="8g" --name sentry docker:dind) || true
- docker exec sentry sh -c "git clone https://github.com/getsentry/self-hosted.git || true"

Expand Down Expand Up @@ -155,7 +160,7 @@ tasks:
- docker rm $(docker ps -aq) || true
- docker rmi $(docker images -q) || true
- docker volume rm $(docker volume ls -q) || true

clamav-up:
desc: Start clamav service
dir: tdrs-backend
Expand Down Expand Up @@ -187,7 +192,7 @@ tasks:
- task: frontend-up
- task: clamav-up


# need more work
frontend-init:
desc: Initialize the frontend project
Expand Down
2 changes: 1 addition & 1 deletion scripts/apply-remote-migrations.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ app=${1}
cd ./tdrs-backend

echo "Install dependencies..."
sudo apt install -y gcc
sudo apt-get install -y gcc && sudo apt-get install -y graphviz && sudo apt-get install -y graphviz-dev
sudo apt install -y libpq-dev python3-dev

python -m venv ./env
Expand Down
4 changes: 4 additions & 0 deletions scripts/deploy-backend.sh
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,10 @@ update_kibana()
cf add-network-policy "$CGAPPNAME_BACKEND" "$CGAPPNAME_KIBANA" --protocol tcp --port 5601
cf add-network-policy "$CGAPPNAME_FRONTEND" "$CGAPPNAME_KIBANA" --protocol tcp --port 5601
cf add-network-policy "$CGAPPNAME_KIBANA" "$CGAPPNAME_FRONTEND" --protocol tcp --port 80

# Upload dashboards to Kibana
CMD="curl -X POST $CGAPPNAME_KIBANA.apps.internal:5601/api/saved_objects/_import -H 'kbn-xsrf: true' --form file=@/home/vcap/app/tdpservice/search_indexes/kibana_saved_objs.ndjson"
cf run-task $CGAPPNAME_BACKEND --command "$CMD" --name kibana-obj-upload
}

update_backend()
Expand Down
2 changes: 2 additions & 0 deletions tdrs-backend/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@ services:
- SERVER_BASEPATH=/kibana
- SERVER_SECURITYRESPONSEHEADERS_REFERRERPOLICY=no-referrer
- CSP_WARNLEGACYBROWSERS=false
volumes:
- ./search_indexes/kibana_saved_objs.ndjson:/usr/share/kibana/kibana_saved_objs.ndjson
depends_on:
- elastic

Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/manifest.kibana.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ applications:
SERVER_SECURITYRESPONSEHEADERS_REFERRERPOLICY: no-referrer
CSP_WARNLEGACYBROWSERS: false
docker:
image: docker.elastic.co/kibana/kibana-oss:7.4.2
image: docker.elastic.co/kibana/kibana-oss:7.10.2
command: |
export ELASTICSEARCH_HOSTS=http://$CGAPPNAME_PROXY.apps.internal:8080 &&
/usr/local/bin/dumb-init -- /usr/local/bin/kibana-docker
6 changes: 6 additions & 0 deletions tdrs-backend/tdpservice/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -395,6 +395,12 @@ def test_private_key():
yield get_private_key(key)


@pytest.fixture()
def system_user():
    """Yield a User record carrying the reserved 'system' username."""
    # Built via the shared UserFactory so the record matches other test users.
    user = UserFactory.create(username='system')
    return user


# Register factories with pytest-factoryboy for automatic dependency injection
# of model-related fixtures into tests.
register(OwaspZapScanFactory)
Expand Down
2 changes: 2 additions & 0 deletions tdrs-backend/tdpservice/data_files/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from io import BytesIO
import xlsxwriter
import calendar
from tdpservice.parsers.models import ParserErrorCategoryChoices


def get_xls_serialized_file(data):
Expand Down Expand Up @@ -48,6 +49,7 @@ def format_error_msg(x):
('item_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].values()])),
('internal_variable_name', lambda x: ','.join([i for i in chk(x)['fields_json']['friendly_name'].keys()])),
('row_number', lambda x: x['row_number']),
('error_type', lambda x: str(ParserErrorCategoryChoices(x['error_type']).label)),
]

# write beta banner
Expand Down
7 changes: 3 additions & 4 deletions tdrs-backend/tdpservice/parsers/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,11 +211,10 @@ def rollback_records(unsaved_records, datafile):
f"Encountered error while indexing datafile documents: \n{e}",
"error"
)
logger.warn("Encountered an Elastic exception, enforcing DB cleanup.")
logger.warning("Encountered an Elastic exception, enforcing DB cleanup.")
num_deleted, models = qset.delete()
logger.info("Succesfully performed DB cleanup after elastic failure.")
log_parser_exception(datafile,
"Succesfully performed DB cleanup after elastic failure.",
"Succesfully performed DB cleanup after elastic failure in rollback_records.",
"info"
)
except DatabaseError as e:
Expand Down Expand Up @@ -310,7 +309,7 @@ def delete_serialized_records(duplicate_manager, dfs):
total_deleted += num_deleted
dfs.total_number_of_records_created -= num_deleted
log_parser_exception(dfs.datafile,
"Succesfully performed DB cleanup after elastic failure.",
"Succesfully performed DB cleanup after elastic failure in delete_serialized_records.",
"info"
)
except DatabaseError as e:
Expand Down
82 changes: 41 additions & 41 deletions tdrs-backend/tdpservice/parsers/test/factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,43 +184,43 @@ class Meta:
EMPLOYMENT_STATUS = 1
WORK_ELIGIBLE_INDICATOR = "01"
WORK_PART_STATUS = "01"
UNSUB_EMPLOYMENT = 1
SUB_PRIVATE_EMPLOYMENT = 1
SUB_PUBLIC_EMPLOYMENT = 1
WORK_EXPERIENCE_HOP = 1
WORK_EXPERIENCE_EA = 1
WORK_EXPERIENCE_HOL = 1
OJT = 1
JOB_SEARCH_HOP = 1
JOB_SEARCH_EA = 1
JOB_SEARCH_HOL = 1
COMM_SERVICES_HOP = 1
COMM_SERVICES_EA = 1
COMM_SERVICES_HOL = 1
VOCATIONAL_ED_TRAINING_HOP = 1
VOCATIONAL_ED_TRAINING_EA = 1
VOCATIONAL_ED_TRAINING_HOL = 1
JOB_SKILLS_TRAINING_HOP = 1
JOB_SKILLS_TRAINING_EA = 1
JOB_SKILLS_TRAINING_HOL = 1
ED_NO_HIGH_SCHOOL_DIPL_HOP = 1
ED_NO_HIGH_SCHOOL_DIPL_EA = 1
ED_NO_HIGH_SCHOOL_DIPL_HOL = 1
SCHOOL_ATTENDENCE_HOP = 1
SCHOOL_ATTENDENCE_EA = 1
SCHOOL_ATTENDENCE_HOL = 1
PROVIDE_CC_HOP = 1
PROVIDE_CC_EA = 1
PROVIDE_CC_HOL = 1
OTHER_WORK_ACTIVITIES = 1
DEEMED_HOURS_FOR_OVERALL = 1
DEEMED_HOURS_FOR_TWO_PARENT = 1
EARNED_INCOME = 1
UNEARNED_INCOME_TAX_CREDIT = 1
UNEARNED_SOCIAL_SECURITY = 1
UNEARNED_SSI = 1
UNEARNED_WORKERS_COMP = 1
OTHER_UNEARNED_INCOME = 1
UNSUB_EMPLOYMENT = "01"
SUB_PRIVATE_EMPLOYMENT = "01"
SUB_PUBLIC_EMPLOYMENT = "01"
WORK_EXPERIENCE_HOP = "01"
WORK_EXPERIENCE_EA = "01"
WORK_EXPERIENCE_HOL = "01"
OJT = "01"
JOB_SEARCH_HOP = "01"
JOB_SEARCH_EA = "01"
JOB_SEARCH_HOL = "01"
COMM_SERVICES_HOP = "01"
COMM_SERVICES_EA = "01"
COMM_SERVICES_HOL = "01"
VOCATIONAL_ED_TRAINING_HOP = "01"
VOCATIONAL_ED_TRAINING_EA = "01"
VOCATIONAL_ED_TRAINING_HOL = "01"
JOB_SKILLS_TRAINING_HOP = "01"
JOB_SKILLS_TRAINING_EA = "01"
JOB_SKILLS_TRAINING_HOL = "01"
ED_NO_HIGH_SCHOOL_DIPL_HOP = "01"
ED_NO_HIGH_SCHOOL_DIPL_EA = "01"
ED_NO_HIGH_SCHOOL_DIPL_HOL = "01"
SCHOOL_ATTENDENCE_HOP = "01"
SCHOOL_ATTENDENCE_EA = "01"
SCHOOL_ATTENDENCE_HOL = "01"
PROVIDE_CC_HOP = "01"
PROVIDE_CC_EA = "01"
PROVIDE_CC_HOL = "01"
OTHER_WORK_ACTIVITIES = "01"
DEEMED_HOURS_FOR_OVERALL = "01"
DEEMED_HOURS_FOR_TWO_PARENT = "01"
EARNED_INCOME = "01"
UNEARNED_INCOME_TAX_CREDIT = "01"
UNEARNED_SOCIAL_SECURITY = "01"
UNEARNED_SSI = "01"
UNEARNED_WORKERS_COMP = "01"
OTHER_UNEARNED_INCOME = "01"


class TanfT3Factory(factory.django.DjangoModelFactory):
Expand Down Expand Up @@ -451,10 +451,10 @@ class Meta:
CURRENT_MONTH_STATE_EXEMPT = 1
EMPLOYMENT_STATUS = 1
WORK_PART_STATUS = "01"
UNSUB_EMPLOYMENT = 1
SUB_PRIVATE_EMPLOYMENT = 1
SUB_PUBLIC_EMPLOYMENT = 1
OJT = 1
UNSUB_EMPLOYMENT = "01"
SUB_PRIVATE_EMPLOYMENT = "01"
SUB_PUBLIC_EMPLOYMENT = "01"
OJT = "01"
JOB_SEARCH = '1'
COMM_SERVICES = '1'
VOCATIONAL_ED_TRAINING = '1'
Expand Down
11 changes: 10 additions & 1 deletion tdrs-backend/tdpservice/parsers/test/test_parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@


import pytest
import os
from django.contrib.admin.models import LogEntry
from django.conf import settings
from django.db.models import Q as Query
Expand Down Expand Up @@ -1299,7 +1300,7 @@ def test_parse_tribal_section_2_file(tribal_section_2_file, dfs):
t4 = t4_objs.first()
t5 = t5_objs.last()

assert t4.CLOSURE_REASON == 8
assert t4.CLOSURE_REASON == '15'
assert t5.COUNTABLE_MONTH_FED_TIME == ' 8'


Expand Down Expand Up @@ -1739,6 +1740,9 @@ def test_parse_duplicate(file, batch_size, model, record_type, num_errors, dfs,
settings.BULK_CREATE_BATCH_SIZE = batch_size

parse.parse_datafile(datafile, dfs)

settings.BULK_CREATE_BATCH_SIZE = os.getenv("BULK_CREATE_BATCH_SIZE", 10000)

parser_errors = ParserError.objects.filter(file=datafile,
error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY).order_by('id')
for e in parser_errors:
Expand Down Expand Up @@ -1782,6 +1786,9 @@ def test_parse_partial_duplicate(file, batch_size, model, record_type, num_error
settings.BULK_CREATE_BATCH_SIZE = batch_size

parse.parse_datafile(datafile, dfs)

settings.BULK_CREATE_BATCH_SIZE = os.getenv("BULK_CREATE_BATCH_SIZE", 10000)

parser_errors = ParserError.objects.filter(file=datafile,
error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY).order_by('id')
for e in parser_errors:
Expand All @@ -1806,6 +1813,8 @@ def test_parse_cat_4_edge_case_file(cat4_edge_case_file, dfs):

parse.parse_datafile(cat4_edge_case_file, dfs)

settings.BULK_CREATE_BATCH_SIZE = os.getenv("BULK_CREATE_BATCH_SIZE", 10000)

parser_errors = ParserError.objects.filter(file=cat4_edge_case_file).filter(
error_type=ParserErrorCategoryChoices.CASE_CONSISTENCY)

Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/parsers/transforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def transform(value, **kwargs):
month = transform_to_months(quarter)[month_index]
except ValueError:
return None
return f"{year}{month_to_int(month)}"
return int(f"{year}{month_to_int(month)}")
return transform

def tanf_ssn_decryption_func(value, **kwargs):
Expand Down
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/parsers/validators/category3.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,7 @@ def validate(record, row_schema):
"Caught exception in validator: validate__WORK_ELIGIBLE_INDICATOR__HOH__AGE. " +
f"With field values: {vals}."
)
logger.error(f'Exception: {e}')
logger.debug(f'Exception: {e}')
# Per conversation with Alex on 03/26/2024, returning the true case during exception handling to avoid
# confusing the STTs.
return true_case
Expand Down
25 changes: 14 additions & 11 deletions tdrs-backend/tdpservice/scheduling/management/db_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,10 @@


OS_ENV = os.environ
content_type = ContentType.objects.get_for_model(LogEntry)

def get_content_type():
    """Return the ContentType row for the Django admin LogEntry model.

    Resolved lazily at call time (rather than at import time) so the module
    can be imported before the database is ready.
    """
    log_entry_content_type = ContentType.objects.get_for_model(LogEntry)
    return log_entry_content_type

def get_system_values():
"""Return dict of keys and settings to use whether local or deployed."""
Expand Down Expand Up @@ -91,7 +94,7 @@ def backup_database(file_name,
logger.info(msg)
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Executed Database Backup",
action_flag=ADDITION,
Expand Down Expand Up @@ -123,7 +126,7 @@ def restore_database(file_name, postgres_client, database_uri, system_user):
msg = "Completed database creation."
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Executed Database create",
action_flag=ADDITION,
Expand All @@ -145,7 +148,7 @@ def restore_database(file_name, postgres_client, database_uri, system_user):
msg = "Completed database restoration."
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Executed Database restore",
action_flag=ADDITION,
Expand Down Expand Up @@ -177,7 +180,7 @@ def upload_file(file_name, bucket, sys_values, system_user, object_name=None, re
msg = "Successfully uploaded {} to s3://{}/{}.".format(file_name, bucket, object_name)
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Executed database backup S3 upload",
action_flag=ADDITION,
Expand Down Expand Up @@ -208,7 +211,7 @@ def download_file(bucket,
msg = "Successfully downloaded s3 file {}/{} to {}.".format(bucket, object_name, file_name)
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Executed database backup S3 download",
action_flag=ADDITION,
Expand Down Expand Up @@ -293,7 +296,7 @@ def main(argv, sys_values, system_user):
if arg_to_backup:
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Begining Database Backup",
action_flag=ADDITION,
Expand All @@ -316,7 +319,7 @@ def main(argv, sys_values, system_user):

LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Finished Database Backup",
action_flag=ADDITION,
Expand All @@ -329,7 +332,7 @@ def main(argv, sys_values, system_user):
elif arg_to_restore:
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Begining Database Restore",
action_flag=ADDITION,
Expand All @@ -352,7 +355,7 @@ def main(argv, sys_values, system_user):

LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Finished Database Restore",
action_flag=ADDITION,
Expand All @@ -377,7 +380,7 @@ def run_backup(arg):
logger.error(f"Caught Exception in run_backup. Exception: {e}.")
LogEntry.objects.log_action(
user_id=system_user.pk,
content_type_id=content_type.pk,
content_type_id=get_content_type().pk,
object_id=None,
object_repr="Exception in run_backup",
action_flag=ADDITION,
Expand Down
Loading

0 comments on commit e5b4df9

Please sign in to comment.