Skip to content

Commit

Permalink
- adding logging throughout
Browse files Browse the repository at this point in the history
  • Loading branch information
elipe17 committed Jun 28, 2024
1 parent 0ef832a commit 4d00a5f
Show file tree
Hide file tree
Showing 5 changed files with 94 additions and 17 deletions.
2 changes: 1 addition & 1 deletion tdrs-backend/tdpservice/scheduling/db_backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ def restore_database(file_name, postgres_client, database_uri, system_user):
logger.info(msg)
except Exception as e:
logger.error(f"Caught exception while creating the database. Exception: {e}.")
return False
raise e

# write .pgpass
with open('/home/vcap/.pgpass', 'w') as f:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,8 @@
import os
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
from tdpservice.data_files.models import DataFile
from tdpservice.scheduling import parser_task
from tdpservice.search_indexes.documents import tanf, ssp, tribal
from tdpservice.scheduling.db_backup import main, get_system_values
from tdpservice.users.models import User
from tdpservice.core.utils import log
import logging

logger = logging.getLogger(__name__)
Expand All @@ -27,13 +22,19 @@ def handle(self, *args, **options):
if (not options['backup'] and not options['restore']) or (options['backup'] and options['restore']):
print("\nYou must specify -b or -r but not both.\n")
return

switch = '-b' if options['backup'] else '-r'
file = options["file"]

if not settings.USE_LOCALSTACK:
system_user, created = User.objects.get_or_create(username='system')
if created:
logger.debug('Created reserved system user.')
main([f'{switch}', '-f', f'{file}'], sys_values=get_system_values(), system_user=system_user)
try:
main([f'{switch}', '-f', f'{file}'], sys_values=get_system_values(), system_user=system_user)
except Exception as e:
logger.error(f"Exception occured while executing backup/restore: {e}")
raise e
logger.info("Cloud backup/restore job complete.")
else:
os.system(f"export PGPASSWORD={settings.DATABASES['default']['PASSWORD']}")
Expand All @@ -45,7 +46,7 @@ def handle(self, *args, **options):

if options['backup']:
cmd = (f"pg_dump -h {db_host} -p {db_port} -d {db_name} -U {db_user} -F c --no-password --no-acl "
f"--no-owner -f {file} -v")
f"--no-owner -f {file}")
os.system(cmd)
logger.info(f"Local backup saved to: {file}.")
elif options['restore']:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
from tdpservice.data_files.models import DataFile
from tdpservice.scheduling import parser_task
from tdpservice.search_indexes.documents import tanf, ssp, tribal
from tdpservice.users.models import User
from tdpservice.core.utils import log
from django.contrib.admin.models import ADDITION
from tdpservice.users.models import User
from datetime import datetime
import logging

Expand All @@ -27,6 +28,13 @@ def add_arguments(self, parser):
parser.add_argument("--fiscal_year", type=str)
parser.add_argument("--all", action='store_true')

def __get_log_context(self, system_user):
    """Build the admin audit-log context for a Clean and Reparse run.

    The dict matches the keyword arguments that ``tdpservice.core.utils.log``
    forwards when writing a Django admin ``LogEntry``.
    """
    return {
        'user_id': system_user.id,
        'action_flag': ADDITION,
        'object_repr': "Clean and Reparse",
    }

def handle(self, *args, **options):
"""Delete datafiles matching a query."""
fiscal_year = options.get('fiscal_year', None)
Expand Down Expand Up @@ -70,17 +78,45 @@ def handle(self, *args, **options):
print('Cancelled.')
return

system_user, created = User.objects.get_or_create(username='system')
if created:
logger.debug('Created reserved system user.')
log_context = self.__get_log_context(system_user)

all_fy = "all"
all_q = "1-4"
log(f"Beginning Clean and reparse for FY {fiscal_year if fiscal_year else all_fy} and "
f"Q{fiscal_quarter if fiscal_quarter else all_q}",
logger_context=log_context,
level='info')

if files.count() == 0:
log(f"No files available for the selected Fiscal Year: {fiscal_year if fiscal_year else all_fy} and "
f"Quarter: {fiscal_quarter if fiscal_quarter else all_q}. Nothing to do.",
logger_context=log_context,
level='warn')
return

try:
logger.info("Begining reparse DB Backup.")
logger.info("Beginning reparse DB Backup.")
pattern = "%d-%m-%Y_%H:%M:%S"
backup_file_name += f"_{datetime.now().strftime(pattern)}.pg"
call_command('backup_restore_db', '-b', '-f', f'{backup_file_name}')
logger.info("Backup complete! Commencing clean and reparse.")
except Exception as e:
logger.critical('Database backup FAILED. Clean and re-parse NOT executed. Database IS consistent!')
log(f"Database backup FAILED. Clean and re-parse NOT executed. Database and Elastic are CONSISTENT!",
logger_context=log_context,
level='error')
raise e

call_command('tdp_search_index', '--create', '-f')
try:
call_command('tdp_search_index', '--create', '-f')
except Exception as e:
log(f"Elastic index creation FAILED. Clean and re-parse NOT executed. "
"Database is CONSISTENT, Elastic is INCONSISTENT!",
logger_context=log_context,
level='error')
raise e

file_ids = files.values_list('id', flat=True).distinct()

Expand All @@ -90,6 +126,10 @@ def handle(self, *args, **options):
tribal.Tribal_TANF_T1, tribal.Tribal_TANF_T2, tribal.Tribal_TANF_T3, tribal.Tribal_TANF_T4, tribal.Tribal_TANF_T5, tribal.Tribal_TANF_T6, tribal.Tribal_TANF_T7
]

log("DB backup and Index creation complete. Beginning database cleanse.",
logger_context=log_context,
level='info')

for m in model_types:
objs = m.objects.all().filter(datafile_id__in=file_ids)
logger.info(f'Deleting {objs.count()}, {m} objects')
Expand All @@ -98,24 +138,40 @@ def handle(self, *args, **options):
try:
objs._raw_delete(objs.db)
except Exception as e:
logger.critical(f'_raw_delete failed for model {m}. Database is now inconsistent! Restore the DB from '
'the backup as soon as possible!')
log(f'_raw_delete failed for model {m}. Database and Elastic are INCONSISTENT! Restore the DB from '
'the backup as soon as possible!',
logger_context=log_context,
level='critical')
raise e

logger.info(f'Deleting and reparsing {files.count()} files')
for f in files:
try:
f.delete()
except Exception as e:
logger.error(f'DataFile.delete failed for id: {f.pk}')
log(f'DataFile.delete failed for id: {f.pk}. Database and Elastic are INCONSISTENT! Restore the '
'DB from the backup as soon as possible!',
logger_context=log_context,
level='critical')
raise e

try:
f.save()
except Exception as e:
logger.error(f'DataFile.save failed for id: {f.pk}')
log(f'DataFile.save failed for id: {f.pk}. Database and Elastic are INCONSISTENT! Restore the '
'DB from the backup as soon as possible!',
logger_context=log_context,
level='critical')
raise e

# latest version only? -> possible new ticket
parser_task.parse.delay(f.pk, should_send_submission_email=False)

log("Database cleansing complete and all files have been rescheduling for parsing and validation.",
logger_context=log_context,
level='info')
log(f"Clean and reparse completed for FY {fiscal_year if fiscal_year else all_fy} and "
f"Q{fiscal_quarter if fiscal_quarter else all_q}",
logger_context=log_context,
level='info')
logger.info('Done. All tasks have been queued to re-parse the selected datafiles.')
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@
from django_elasticsearch_dsl.management.commands import search_index
from django_elasticsearch_dsl.registries import registry
from django.conf import settings
from tdpservice.core.utils import log
from django.contrib.admin.models import ADDITION
from tdpservice.users.models import User


class Command(search_index.Command):
Expand All @@ -18,7 +21,15 @@ class Command(search_index.Command):
def __init__(self, *args, **kwargs):
    """Initialize the command, deferring entirely to the base search_index Command."""
    # Zero-argument super() — the explicit (Command, self) form is a Python 2
    # holdover; the file already relies on Python 3 features (f-strings).
    super().__init__(*args, **kwargs)

def __get_log_context(self):
    """Return the admin audit-log context for Elastic index creation events.

    NOTE(review): this looks up (and lazily creates) the reserved 'system'
    user on every call, so it assumes a working database connection.
    """
    system_user, _ = User.objects.get_or_create(username='system')
    return {
        'user_id': system_user.id,
        'action_flag': ADDITION,
        'object_repr': "Elastic Index Creation",
    }

def _create(self, models, aliases, options):
log_context = self.__get_log_context()
options['use_alias'] = True
options['use_alias_keep_index'] = True
alias_index_pairs = []
Expand All @@ -36,6 +47,10 @@ def _create(self, models, aliases, options):
)
index._name = new_index

log(f"All aliased indexes created with suffix: {index_suffix}",
logger_context=log_context,
level='info')

super()._create(models, aliases, options)

for alias_index_pair in alias_index_pairs:
Expand All @@ -45,6 +60,10 @@ def _create(self, models, aliases, options):
alias, alias_index_pair['index'], alias_exists, options
)

log(f"Aliased index creation complete.",
logger_context=log_context,
level='info')

def _populate(self, models, options):
parallel = options['parallel']
for doc in registry.get_documents(models):
Expand Down
3 changes: 2 additions & 1 deletion tdrs-frontend/.env
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ REACT_APP_DEBOUNCE_TIME=30000
REACT_APP_EVENT_THROTTLE_TIME=60000

# Enable the Kibana tab for dev purposes.
# REACT_APP_DEV_KIBANA=true
# TODO: Comment once done developing
REACT_APP_DEV_KIBANA=true

# Setup SCSS:
# The following makes it possible to import SASS modules
Expand Down

0 comments on commit 4d00a5f

Please sign in to comment.