From 4d00a5fdeef892480fd1eeb7f3a395cd02639940 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 28 Jun 2024 13:34:31 -0400 Subject: [PATCH] - adding logging throughout --- .../tdpservice/scheduling/db_backup.py | 2 +- .../management/commands/backup_restore_db.py | 15 ++-- .../management/commands/clean_and_reparse.py | 72 ++++++++++++++++--- .../management/commands/tdp_search_index.py | 19 +++++ tdrs-frontend/.env | 3 +- 5 files changed, 94 insertions(+), 17 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 05f51ad09..4bdf82b4e 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -142,7 +142,7 @@ def restore_database(file_name, postgres_client, database_uri, system_user): logger.info(msg) except Exception as e: logger.error(f"Caught exception while creating the database. Exception: {e}.") - return False + raise e # write .pgpass with open('/home/vcap/.pgpass', 'w') as f: diff --git a/tdrs-backend/tdpservice/search_indexes/management/commands/backup_restore_db.py b/tdrs-backend/tdpservice/search_indexes/management/commands/backup_restore_db.py index 454704f78..fdf05bd63 100644 --- a/tdrs-backend/tdpservice/search_indexes/management/commands/backup_restore_db.py +++ b/tdrs-backend/tdpservice/search_indexes/management/commands/backup_restore_db.py @@ -1,13 +1,8 @@ import os from django.core.management.base import BaseCommand -from django.core.management import call_command from django.conf import settings -from tdpservice.data_files.models import DataFile -from tdpservice.scheduling import parser_task -from tdpservice.search_indexes.documents import tanf, ssp, tribal from tdpservice.scheduling.db_backup import main, get_system_values from tdpservice.users.models import User -from tdpservice.core.utils import log import logging logger = logging.getLogger(__name__) @@ -27,13 +22,19 @@ def handle(self, *args, **options): if 
(not options['backup'] and not options['restore']) or (options['backup'] and options['restore']): print("\nYou must specify -b or -r but not both.\n") return + switch = '-b' if options['backup'] else '-r' file = options["file"] + if not settings.USE_LOCALSTACK: system_user, created = User.objects.get_or_create(username='system') if created: logger.debug('Created reserved system user.') - main([f'{switch}', '-f', f'{file}'], sys_values=get_system_values(), system_user=system_user) + try: + main([f'{switch}', '-f', f'{file}'], sys_values=get_system_values(), system_user=system_user) + except Exception as e: + logger.error(f"Exception occurred while executing backup/restore: {e}") + raise e logger.info("Cloud backup/restore job complete.") else: os.system(f"export PGPASSWORD={settings.DATABASES['default']['PASSWORD']}") @@ -45,7 +46,7 @@ def handle(self, *args, **options): if options['backup']: cmd = (f"pg_dump -h {db_host} -p {db_port} -d {db_name} -U {db_user} -F c --no-password --no-acl " - f"--no-owner -f {file} -v") + f"--no-owner -f {file}") os.system(cmd) logger.info(f"Local backup saved to: {file}.") elif options['restore']: diff --git a/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py b/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py index 0a7b32439..8d19ffb9b 100644 --- a/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py +++ b/tdrs-backend/tdpservice/search_indexes/management/commands/clean_and_reparse.py @@ -8,8 +8,9 @@ from tdpservice.data_files.models import DataFile from tdpservice.scheduling import parser_task from tdpservice.search_indexes.documents import tanf, ssp, tribal -from tdpservice.users.models import User from tdpservice.core.utils import log +from django.contrib.admin.models import ADDITION +from tdpservice.users.models import User from datetime import datetime import logging @@ -27,6 +28,13 @@ def add_arguments(self, parser): 
parser.add_argument("--fiscal_year", type=str) parser.add_argument("--all", action='store_true') + def __get_log_context(self, system_user): + context = {'user_id': system_user.id, + 'action_flag': ADDITION, + 'object_repr': "Clean and Reparse" + } + return context + def handle(self, *args, **options): """Delete datafiles matching a query.""" fiscal_year = options.get('fiscal_year', None) @@ -70,17 +78,45 @@ def handle(self, *args, **options): print('Cancelled.') return + system_user, created = User.objects.get_or_create(username='system') + if created: + logger.debug('Created reserved system user.') + log_context = self.__get_log_context(system_user) + + all_fy = "all" + all_q = "1-4" + log(f"Beginning Clean and reparse for FY {fiscal_year if fiscal_year else all_fy} and " + f"Q{fiscal_quarter if fiscal_quarter else all_q}", + logger_context=log_context, + level='info') + + if files.count() == 0: + log(f"No files available for the selected Fiscal Year: {fiscal_year if fiscal_year else all_fy} and " + f"Quarter: {fiscal_quarter if fiscal_quarter else all_q}. Nothing to do.", + logger_context=log_context, + level='warn') + return + try: - logger.info("Begining reparse DB Backup.") + logger.info("Beginning reparse DB Backup.") pattern = "%d-%m-%Y_%H:%M:%S" backup_file_name += f"_{datetime.now().strftime(pattern)}.pg" call_command('backup_restore_db', '-b', '-f', f'{backup_file_name}') logger.info("Backup complete! Commencing clean and reparse.") except Exception as e: - logger.critical('Database backup FAILED. Clean and re-parse NOT executed. Database IS consistent!') + log(f"Database backup FAILED. Clean and re-parse NOT executed. Database and Elastic are CONSISTENT!", + logger_context=log_context, + level='error') raise e - call_command('tdp_search_index', '--create', '-f') + try: + call_command('tdp_search_index', '--create', '-f') + except Exception as e: + log(f"Elastic index creation FAILED. Clean and re-parse NOT executed. 
" + "Database is CONSISTENT, Elastic is INCONSISTENT!", + logger_context=log_context, + level='error') + raise e file_ids = files.values_list('id', flat=True).distinct() @@ -90,6 +126,10 @@ def handle(self, *args, **options): tribal.Tribal_TANF_T1, tribal.Tribal_TANF_T2, tribal.Tribal_TANF_T3, tribal.Tribal_TANF_T4, tribal.Tribal_TANF_T5, tribal.Tribal_TANF_T6, tribal.Tribal_TANF_T7 ] + log("DB backup and Index creation complete. Beginning database cleanse.", + logger_context=log_context, + level='info') + for m in model_types: objs = m.objects.all().filter(datafile_id__in=file_ids) logger.info(f'Deleting {objs.count()}, {m} objects') @@ -98,8 +138,10 @@ def handle(self, *args, **options): try: objs._raw_delete(objs.db) except Exception as e: - logger.critical(f'_raw_delete failed for model {m}. Database is now inconsistent! Restore the DB from ' - 'the backup as soon as possible!') + log(f'_raw_delete failed for model {m}. Database and Elastic are INCONSISTENT! Restore the DB from ' + 'the backup as soon as possible!', + logger_context=log_context, + level='critical') raise e logger.info(f'Deleting and reparsing {files.count()} files') @@ -107,15 +149,29 @@ def handle(self, *args, **options): try: f.delete() except Exception as e: - logger.error(f'DataFile.delete failed for id: {f.pk}') + log(f'DataFile.delete failed for id: {f.pk}. Database and Elastic are INCONSISTENT! Restore the ' + 'DB from the backup as soon as possible!', + logger_context=log_context, + level='critical') raise e try: f.save() except Exception as e: - logger.error(f'DataFile.save failed for id: {f.pk}') + log(f'DataFile.save failed for id: {f.pk}. Database and Elastic are INCONSISTENT! Restore the ' + 'DB from the backup as soon as possible!', + logger_context=log_context, + level='critical') raise e # latest version only? 
-> possible new ticket parser_task.parse.delay(f.pk, should_send_submission_email=False) + + log("Database cleansing complete and all files have been rescheduled for parsing and validation.", + logger_context=log_context, + level='info') + log(f"Clean and reparse completed for FY {fiscal_year if fiscal_year else all_fy} and " + f"Q{fiscal_quarter if fiscal_quarter else all_q}", + logger_context=log_context, + level='info') logger.info('Done. All tasks have been queued to re-parse the selected datafiles.') diff --git a/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py b/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py index b663a0cb4..9fe6e4b7d 100644 --- a/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py +++ b/tdrs-backend/tdpservice/search_indexes/management/commands/tdp_search_index.py @@ -10,6 +10,9 @@ from django_elasticsearch_dsl.management.commands import search_index from django_elasticsearch_dsl.registries import registry from django.conf import settings +from tdpservice.core.utils import log +from django.contrib.admin.models import ADDITION +from tdpservice.users.models import User class Command(search_index.Command): @@ -18,7 +21,15 @@ class Command(search_index.Command): def __init__(self, *args, **kwargs): super(Command, self).__init__(*args, **kwargs) + def __get_log_context(self): + context = {'user_id': User.objects.get_or_create(username='system')[0].id, + 'action_flag': ADDITION, + 'object_repr': "Elastic Index Creation" + } + return context + def _create(self, models, aliases, options): + log_context = self.__get_log_context() options['use_alias'] = True options['use_alias_keep_index'] = True alias_index_pairs = [] @@ -36,6 +47,10 @@ def _create(self, models, aliases, options): ) index._name = new_index + log(f"All aliased indexes created with suffix: {index_suffix}", + logger_context=log_context, + level='info') + super()._create(models, aliases, options) 
for alias_index_pair in alias_index_pairs: @@ -45,6 +60,10 @@ def _create(self, models, aliases, options): alias, alias_index_pair['index'], alias_exists, options ) + log(f"Aliased index creation complete.", + logger_context=log_context, + level='info') + def _populate(self, models, options): parallel = options['parallel'] for doc in registry.get_documents(models): diff --git a/tdrs-frontend/.env b/tdrs-frontend/.env index 11902ac81..794c59a5b 100644 --- a/tdrs-frontend/.env +++ b/tdrs-frontend/.env @@ -37,7 +37,8 @@ REACT_APP_DEBOUNCE_TIME=30000 REACT_APP_EVENT_THROTTLE_TIME=60000 # Enable the Kibana tab for dev purposes. -# REACT_APP_DEV_KIBANA=true +# TODO: Comment once done developing +REACT_APP_DEV_KIBANA=true # Setup SCSS: # The following makes it possible to import SASS modules