From d8a6034d5f9cd94b2f1e5deae4c7cda365e4f01d Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 15:16:45 -0700 Subject: [PATCH 01/38] - pointing directly to the pg client directory --- tdrs-backend/tdpservice/scheduling/db_backup.py | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index ffcce9f22..a93d1234e 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -26,18 +26,7 @@ def get_system_values(): sys_values['SPACE'] = json.loads(OS_ENV['VCAP_APPLICATION'])['space_name'] # Postgres client pg_dump directory - pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], - stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) - pgdump_search.wait() - pg_dump_paths, pgdump_search_error = pgdump_search.communicate() - pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') - if pg_dump_paths[0] == '': - raise Exception("Postgres client is not found") - - for _ in pg_dump_paths: - if 'pg_dump' in str(_) and 'postgresql' in str(_): - sys_values['POSTGRES_CLIENT'] = _[:_.find('pg_dump')] - print("Found PG client here: {}".format(_)) + sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/bin/" sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] sys_values['S3_CREDENTIALS'] = sys_values['S3_ENV_VARS'][0]['credentials'] @@ -212,7 +201,7 @@ def main(argv, sys_values): if arg_to_backup: # back up database backup_database(file_name=arg_file, - postgres_client=sys_values['POSTGRES_CLIENT'], + postgres_client=sys_values['POSTGRES_CLIENT_DIR'], database_uri=arg_database) # upload backup file @@ -232,7 +221,7 @@ def main(argv, sys_values): # restore database restore_database(file_name=arg_file, - postgres_client=sys_values['POSTGRES_CLIENT'], + postgres_client=sys_values['POSTGRES_CLIENT_DIR'], database_uri=arg_database) os.system('rm ' + arg_file) From 6b0a1f0d1bb5c4b13205046e40b51f1ca980790b Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 15:34:38 -0700 Subject: [PATCH 02/38] - Add lots of logging --- .../tdpservice/scheduling/db_backup.py | 30 +++++++++++++++---- tdrs-backend/tdpservice/scheduling/tasks.py | 22 +++++++++++++- tdrs-backend/tdpservice/settings/common.py | 2 +- 3 files changed, 46 insertions(+), 8 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index a93d1234e..7675785e9 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -10,6 +10,7 @@ import subprocess import sys from django.conf import settings +from django.contrib.admin.models import LogEntry, ADDITION import boto3 import logging @@ -71,12 +72,14 @@ def backup_database(file_name, pg_dump -F c --no-acl --no-owner -f backup.pg postgresql://${USERNAME}:${PASSWORD}@${HOST}:${PORT}/${NAME} """ try: - os.system(postgres_client + "pg_dump -Fc --no-acl -f " + file_name + " -d " + database_uri) - print("Wrote pg dumpfile to {}".format(file_name)) + cmd = postgres_client + "pg_dump -Fc --no-acl -f " + file_name + " -d " + database_uri + logger.info(f"Executing backup command: {cmd}") + os.system(cmd) + logger.info("Wrote pg dumpfile to {}".format(file_name)) return True except Exception as e: - print(e) - return False + logger.error(f"Caught Exception while backing up database. 
Exception: {e}") + raise e def restore_database(file_name, postgres_client, database_uri): @@ -118,10 +121,11 @@ def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov- if object_name is None: object_name = os.path.basename(file_name) + logger.info(f"Uploading {file_name} to S3.") s3_client = boto3.client('s3', region_name=sys_values['S3_REGION']) s3_client.upload_file(file_name, bucket, object_name) - print("Uploaded {} to S3:{}{}".format(file_name, bucket, object_name)) + logger.info("Uploaded {} to S3:{}{}.".format(file_name, bucket, object_name)) return True @@ -210,6 +214,7 @@ def main(argv, sys_values): sys_values=sys_values, region=sys_values['S3_REGION'], object_name="backup"+arg_file) + logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) elif arg_to_restore: @@ -232,7 +237,20 @@ def run_backup(arg): if settings.USE_LOCALSTACK is True: logger.info("Won't backup locally") else: - main([arg], sys_values=get_system_values()) + try: + main([arg], sys_values=get_system_values()) + except Exception as e: + logger.error(f"Caught Exception in run_backup. Exception: {e}.") + LogEntry.objects.log_action( + user_id=None, + content_type_id=None, + object_id=None, + object_repr=None, + action_flag=ADDITION, + change_message=f"Database task with arg: {arg} failed with Exception: {e}.", + ) + return False + return True if __name__ == '__main__': diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index c14faebf2..92bc57825 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -2,6 +2,7 @@ from __future__ import absolute_import from tdpservice.users.models import User, AccountApprovalStatusChoices +from django.contrib.admin.models import LogEntry, ADDITION from django.contrib.auth.models import Group from django.conf import settings from django.urls import reverse @@ -20,7 +21,26 @@ def postgres_backup(*args): """Run nightly postgres backup.""" arg = ''.join(args) logger.debug("postgres_backup::run_backup() run with arg: " + arg) - run_backup(arg) + logger.info("Begining database backup.") + LogEntry.objects.log_action( + user_id=None, + content_type_id=None, + object_id=None, + object_repr=None, + action_flag=ADDITION, + change_message="Begining database backup.", + ) + result = run_backup(arg) + if result: + logger.info("Finished database backup.") + LogEntry.objects.log_action( + user_id=None, + content_type_id=None, + object_id=None, + object_repr=None, + action_flag=ADDITION, + change_message="Finished database backup.", + ) return True @shared_task diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index 7d9126716..d58ffc932 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -454,7 +454,7 @@ class Common(Configuration): CELERY_BEAT_SCHEDULE = { 'name': { 'task': 'tdpservice.scheduling.tasks.postgres_backup', - 'schedule': crontab(minute='0', hour='4'), # Runs at midnight EST + 'schedule': crontab(minute='*/15'), # Runs at midnight EST 'args': "-b", 'options': { 'expires': 15.0, From 9a94eb1f7f530cc2bf7bb26384ad22880def86e6 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 15:44:51 -0700 Subject: [PATCH 03/38] - fix lint --- tdrs-backend/tdpservice/scheduling/db_backup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py 
b/tdrs-backend/tdpservice/scheduling/db_backup.py index 7675785e9..e305ea7e2 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -7,7 +7,6 @@ import getopt import json import os -import subprocess import sys from django.conf import settings from django.contrib.admin.models import LogEntry, ADDITION From 9e670bda278ec535aa4a32e9ec9c1c7534ed6e78 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 16:18:59 -0700 Subject: [PATCH 04/38] - naming crontabs for easy understanding --- tdrs-backend/tdpservice/settings/common.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index d58ffc932..c66013da6 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -452,15 +452,15 @@ class Common(Configuration): CELERY_TIMEZONE = 'UTC' CELERY_BEAT_SCHEDULE = { - 'name': { + 'Database Backup': { 'task': 'tdpservice.scheduling.tasks.postgres_backup', - 'schedule': crontab(minute='*/15'), # Runs at midnight EST + 'schedule': crontab(minute='*/10'), # Runs at midnight EST 'args': "-b", 'options': { 'expires': 15.0, }, }, - 'name': { + 'Account Deactivation Warning': { 'task': 'tdpservice.scheduling.tasks.check_for_accounts_needing_deactivation_warning', 'schedule': crontab(day_of_week='*', hour='13', minute='0'), # Every day at 1pm UTC (9am EST) From fe653b00e5cfe2e70a611bc0f91c6d4e65920ac8 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 16:45:42 -0700 Subject: [PATCH 05/38] - Removing log entries --- tdrs-backend/tdpservice/scheduling/db_backup.py | 9 --------- tdrs-backend/tdpservice/scheduling/tasks.py | 17 ----------------- 2 files changed, 26 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index e305ea7e2..9ab1752e3 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -9,7 +9,6 @@ import os import sys from django.conf import settings -from django.contrib.admin.models import LogEntry, ADDITION import boto3 import logging @@ -240,14 +239,6 @@ def run_backup(arg): main([arg], sys_values=get_system_values()) except Exception as e: logger.error(f"Caught Exception in run_backup. 
Exception: {e}.") - LogEntry.objects.log_action( - user_id=None, - content_type_id=None, - object_id=None, - object_repr=None, - action_flag=ADDITION, - change_message=f"Database task with arg: {arg} failed with Exception: {e}.", - ) return False return True diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index 92bc57825..bb9349784 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -2,7 +2,6 @@ from __future__ import absolute_import from tdpservice.users.models import User, AccountApprovalStatusChoices -from django.contrib.admin.models import LogEntry, ADDITION from django.contrib.auth.models import Group from django.conf import settings from django.urls import reverse @@ -22,25 +21,9 @@ def postgres_backup(*args): arg = ''.join(args) logger.debug("postgres_backup::run_backup() run with arg: " + arg) logger.info("Begining database backup.") - LogEntry.objects.log_action( - user_id=None, - content_type_id=None, - object_id=None, - object_repr=None, - action_flag=ADDITION, - change_message="Begining database backup.", - ) result = run_backup(arg) if result: logger.info("Finished database backup.") - LogEntry.objects.log_action( - user_id=None, - content_type_id=None, - object_id=None, - object_repr=None, - action_flag=ADDITION, - change_message="Finished database backup.", - ) return True @shared_task From d7b30406210620afdec2dec4125384f3126aba94 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Fri, 16 Feb 2024 17:45:03 -0700 Subject: [PATCH 06/38] - testing old method --- tdrs-backend/tdpservice/scheduling/db_backup.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 9ab1752e3..e356f3a55 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -8,6 +8,7 @@ import json import os import sys +import subprocess from django.conf import settings import boto3 import logging @@ -25,7 +26,19 @@ def get_system_values(): sys_values['SPACE'] = json.loads(OS_ENV['VCAP_APPLICATION'])['space_name'] # Postgres client pg_dump directory - sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/bin/" + pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], + stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) + pgdump_search.wait() + pg_dump_paths, pgdump_search_error = pgdump_search.communicate() + pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') + if pg_dump_paths[0] == '': + raise Exception("Postgres client is not found") + + for _ in pg_dump_paths: + if 'pg_dump' in str(_) and 'postgresql' in str(_): + sys_values['POSTGRES_CLIENT_DIR'] = _[:_.find('pg_dump')] + logger.info("Found PG client here: {}".format(_)) + # sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/bin/" sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] sys_values['S3_CREDENTIALS'] = sys_values['S3_ENV_VARS'][0]['credentials'] From 2097d8dbd8ac9c1f5df21ecefc88dc9dd3bdc79c Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 09:06:58 -0700 Subject: [PATCH 07/38] - installing postgres client 15 --- .circleci/deployment/commands.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index 347f119b5..3fdf98db1 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,6 +97,10 
@@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq + sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - + sudo apt-get update + sudo apt-get -y install postgresql-client-15 - run: name: Deploy backend application command: | From 687c8d293d7ecf71be0503e7b837b9509db7e502 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 10:03:32 -0700 Subject: [PATCH 08/38] - print paths to see whats up --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index e356f3a55..d57b8fb5e 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -33,6 +33,8 @@ def get_system_values(): pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') if pg_dump_paths[0] == '': raise Exception("Postgres client is not found") + + logger.info(f"\n\nPG_DUMP PATHS: {pg_dump_paths}\n\n") for _ in pg_dump_paths: if 'pg_dump' in str(_) and 'postgresql' in str(_): From 27ff2db06b030fedcf09e52011486ef9df88c93f Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 10:13:46 -0700 Subject: [PATCH 09/38] - fix lint --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index d57b8fb5e..b52bbeffb 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -33,7 +33,7 @@ def get_system_values(): pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') if pg_dump_paths[0] == '': raise Exception("Postgres client is not found") - + logger.info(f"\n\nPG_DUMP PATHS: {pg_dump_paths}\n\n") for _ in pg_dump_paths: From 76c4b9df5d823ea50f62516c5bf8d4278e914b09 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 10:46:51 -0700 Subject: [PATCH 10/38] - remove all traces of postgres before installing new postgres --- .circleci/deployment/commands.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index 3fdf98db1..e4bd9db29 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,6 +97,8 @@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq + sudo apt-get --purge remove postgresql postgresql-* + sudo apt-get update sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - sudo apt-get update From a2a073be3b8bd3ded4877eb6a621918de1ef023f Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 10:48:57 -0700 Subject: [PATCH 11/38] - disabling tests for speed --- .circleci/build-and-test/jobs.yml | 102 +++++++++++++++--------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/.circleci/build-and-test/jobs.yml b/.circleci/build-and-test/jobs.yml index 5e58a99ae..8a07beb8c 100644 --- a/.circleci/build-and-test/jobs.yml +++ b/.circleci/build-and-test/jobs.yml @@ -5,17 +5,17 @@ - checkout - docker-compose-check - docker-compose-up-with-elastic-backend - - run: - name: Run Unit 
Tests And Create Code Coverage Report - command: | - cd tdrs-backend; - docker-compose run --rm web bash -c "./wait_for_services.sh && pytest --cov-report=xml" - - run: - name: Execute Python Linting Test - command: cd tdrs-backend; docker-compose run --rm web bash -c "flake8 ." - - upload-codecov: - component: backend - coverage-report: ./tdrs-backend/coverage.xml + # - run: + # name: Run Unit Tests And Create Code Coverage Report + # command: | + # cd tdrs-backend; + # docker-compose run --rm web bash -c "./wait_for_services.sh && pytest --cov-report=xml" + # - run: + # name: Execute Python Linting Test + # command: cd tdrs-backend; docker-compose run --rm web bash -c "flake8 ." + # - upload-codecov: + # component: backend + # coverage-report: ./tdrs-backend/coverage.xml test-frontend: executor: machine-executor @@ -24,22 +24,22 @@ - checkout - install-nodejs-machine - disable-npm-audit - - install-nodejs-packages: - app-dir: tdrs-frontend - - run: - name: Run ESLint - command: cd tdrs-frontend; npm run lint - - run: - name: Run Pa11y Accessibility Tests - command: cd tdrs-frontend; mkdir pa11y-screenshots/; npm run test:accessibility - - run: - name: Run Jest Unit Tests - command: cd tdrs-frontend; npm run test:ci - - upload-codecov: - component: frontend - coverage-report: ./tdrs-frontend/coverage/lcov.info - - store_artifacts: - path: tdrs-frontend/pa11y-screenshots/ + # - install-nodejs-packages: + # app-dir: tdrs-frontend + # - run: + # name: Run ESLint + # command: cd tdrs-frontend; npm run lint + # - run: + # name: Run Pa11y Accessibility Tests + # command: cd tdrs-frontend; mkdir pa11y-screenshots/; npm run test:accessibility + # - run: + # name: Run Jest Unit Tests + # command: cd tdrs-frontend; npm run test:ci + # - upload-codecov: + # component: frontend + # coverage-report: ./tdrs-frontend/coverage/lcov.info + # - store_artifacts: + # path: tdrs-frontend/pa11y-screenshots/ test-e2e: executor: large-machine-executor @@ -51,30 +51,30 @@ - docker-compose-up-frontend - install-nodejs-machine - disable-npm-audit - - install-nodejs-packages: - app-dir: tdrs-frontend - - run: - name: Wait for backend to become available - command: cd tdrs-backend; docker-compose run --rm zaproxy bash -c \ - "PATH=$PATH:/home/zap/.local/bin && - pip install wait-for-it && - wait-for-it --service http://web:8080 --timeout 180 -- echo \"Django is ready\"" - - run: - name: apply the migrations - command: cd tdrs-backend; docker-compose exec web bash -c "python manage.py makemigrations; python manage.py migrate" - - run: - name: Remove existing cypress test users - command: cd tdrs-backend; docker-compose exec web python manage.py delete_cypress_users -usernames new-cypress@teamraft.com cypress-admin@teamraft.com - - run: - name: Setup cypress test users - command: cd tdrs-backend; docker-compose exec web python manage.py generate_cypress_users - - run: - name: Run Cypress e2e tests - command: cd tdrs-frontend; npm run test:e2e-ci - - store_artifacts: - path: tdrs-frontend/cypress/screenshots/ - - store_artifacts: - path: tdrs-frontend/cypress/videos/ + # - install-nodejs-packages: + # app-dir: tdrs-frontend + # - run: + # name: Wait for backend to become available + # command: cd tdrs-backend; docker-compose run --rm zaproxy bash -c \ + # "PATH=$PATH:/home/zap/.local/bin && + # pip install wait-for-it && + # wait-for-it --service http://web:8080 --timeout 180 -- echo \"Django is ready\"" + # - run: + # name: apply the migrations + # command: cd tdrs-backend; docker-compose exec web bash -c "python 
manage.py makemigrations; python manage.py migrate" + # - run: + # name: Remove existing cypress test users + # command: cd tdrs-backend; docker-compose exec web python manage.py delete_cypress_users -usernames new-cypress@teamraft.com cypress-admin@teamraft.com + # - run: + # name: Setup cypress test users + # command: cd tdrs-backend; docker-compose exec web python manage.py generate_cypress_users + # - run: + # name: Run Cypress e2e tests + # command: cd tdrs-frontend; npm run test:e2e-ci + # - store_artifacts: + # path: tdrs-frontend/cypress/screenshots/ + # - store_artifacts: + # path: tdrs-frontend/cypress/videos/ secrets-check: executor: docker-executor From e6a84a80958dcf1f44d99dfcb9aba02016dc82fd Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:23:51 -0700 Subject: [PATCH 12/38] Revert "- remove all traces of postgres before installing new postgres" This reverts commit 76c4b9df5d823ea50f62516c5bf8d4278e914b09. --- .circleci/deployment/commands.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index e4bd9db29..3fdf98db1 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,8 +97,6 @@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq - sudo apt-get --purge remove postgresql postgresql-* - sudo apt-get update sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - sudo apt-get update From 64c69075bb0119ce1dab98e7976de67b86c4cd92 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:24:03 -0700 Subject: [PATCH 13/38] Revert "- fix lint" This reverts commit 27ff2db06b030fedcf09e52011486ef9df88c93f. --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index b52bbeffb..d57b8fb5e 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -33,7 +33,7 @@ def get_system_values(): pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') if pg_dump_paths[0] == '': raise Exception("Postgres client is not found") - + logger.info(f"\n\nPG_DUMP PATHS: {pg_dump_paths}\n\n") for _ in pg_dump_paths: From 5a2153ad9b7c6f24e619a81994c7badc2f3c8bff Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:24:13 -0700 Subject: [PATCH 14/38] Revert "- installing postgres client 15" This reverts commit 2097d8dbd8ac9c1f5df21ecefc88dc9dd3bdc79c. 
--- .circleci/deployment/commands.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index 3fdf98db1..347f119b5 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,10 +97,6 @@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq - sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - - sudo apt-get update - sudo apt-get -y install postgresql-client-15 - run: name: Deploy backend application command: | From 1ce4d815613ccff3d3590d62229927cc552d63ca Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:24:22 -0700 Subject: [PATCH 15/38] Revert "Revert "- fix lint"" This reverts commit 64c69075bb0119ce1dab98e7976de67b86c4cd92. --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index d57b8fb5e..b52bbeffb 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -33,7 +33,7 @@ def get_system_values(): pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') if pg_dump_paths[0] == '': raise Exception("Postgres client is not found") - + logger.info(f"\n\nPG_DUMP PATHS: {pg_dump_paths}\n\n") for _ in pg_dump_paths: From 3b73f8df15844a37a4045420ce8c857add1a3957 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:25:02 -0700 Subject: [PATCH 16/38] - Add correct client to apt.yml --- tdrs-backend/apt.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index f07aee4a3..5f638f497 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,6 +4,6 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: - - postgresql-client-12 + - postgresql-client-15 - libjemalloc-dev - redis From 30c3e51011941b7ef4199b4919f232a3ee04c257 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 11:40:10 -0700 Subject: [PATCH 17/38] - making tests even shorter --- .circleci/build-and-test/jobs.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.circleci/build-and-test/jobs.yml b/.circleci/build-and-test/jobs.yml index 8a07beb8c..b9a0c54d4 100644 --- a/.circleci/build-and-test/jobs.yml +++ b/.circleci/build-and-test/jobs.yml @@ -3,8 +3,8 @@ executor: machine-executor steps: - checkout - - docker-compose-check - - docker-compose-up-with-elastic-backend + # - docker-compose-check + # - docker-compose-up-with-elastic-backend # - run: # name: Run Unit Tests And Create Code Coverage Report # command: | @@ -22,8 +22,8 @@ working_directory: ~/tdp-apps steps: - checkout - - install-nodejs-machine - - disable-npm-audit + # - install-nodejs-machine + # - disable-npm-audit # - install-nodejs-packages: # app-dir: tdrs-frontend # - run: @@ -46,11 +46,11 @@ working_directory: ~/tdp-apps steps: - checkout - - docker-compose-check - - docker-compose-up-with-elastic-backend - - docker-compose-up-frontend - - install-nodejs-machine - - disable-npm-audit + # - docker-compose-check + # - docker-compose-up-with-elastic-backend + # - docker-compose-up-frontend + # - install-nodejs-machine + # - disable-npm-audit # - install-nodejs-packages: 
# app-dir: tdrs-frontend # - run: From 72ad9360dbc4e6e85b90869c8c228236c82b577f Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 12:00:45 -0700 Subject: [PATCH 18/38] - trying clietn V14 --- tdrs-backend/apt.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index 5f638f497..f4460200f 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,6 +4,6 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: - - postgresql-client-15 + - postgresql-client-14 - libjemalloc-dev - redis From a2f94d3c406384b3ab10eecc3d09ab6c71c228fc Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 12:20:27 -0700 Subject: [PATCH 19/38] - removing from apt and installing manually --- .circleci/deployment/commands.yml | 4 ++++ tdrs-backend/apt.yml | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index 347f119b5..3fdf98db1 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,6 +97,10 @@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq + sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - + sudo apt-get update + sudo apt-get -y install postgresql-client-15 - run: name: Deploy backend application command: | diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index f4460200f..862f22477 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,6 +4,5 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: - - postgresql-client-14 - libjemalloc-dev - redis From d65d6ecef139ef870f78bdb4bce0034c68ff9151 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 12:32:35 -0700 Subject: [PATCH 20/38] Revert "- removing from apt and installing manually" This reverts commit a2f94d3c406384b3ab10eecc3d09ab6c71c228fc. 
--- .circleci/deployment/commands.yml | 4 ---- tdrs-backend/apt.yml | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.circleci/deployment/commands.yml b/.circleci/deployment/commands.yml index 3fdf98db1..347f119b5 100644 --- a/.circleci/deployment/commands.yml +++ b/.circleci/deployment/commands.yml @@ -97,10 +97,6 @@ sudo apt update sudo add-apt-repository ppa:rmescandon/yq sudo apt-get install yq - sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - - sudo apt-get update - sudo apt-get -y install postgresql-client-15 - run: name: Deploy backend application command: | diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index 862f22477..f4460200f 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,5 +4,6 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: + - postgresql-client-14 - libjemalloc-dev - redis From 2d411640ce787ee011747d97690a13f09ae00918 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 12:32:57 -0700 Subject: [PATCH 21/38] - revert --- tdrs-backend/apt.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index f4460200f..5f638f497 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,6 +4,6 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: - - postgresql-client-14 + - postgresql-client-15 - libjemalloc-dev - redis From 68b9f8c2251448c09968e7c27357db9c6f58c1e2 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 13:53:01 -0700 Subject: [PATCH 22/38] - Version 12 in apt.yml - Tell terraform to specify db version --- tdrs-backend/apt.yml | 2 +- terraform/dev/main.tf | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tdrs-backend/apt.yml b/tdrs-backend/apt.yml index 5f638f497..f07aee4a3 100644 --- a/tdrs-backend/apt.yml +++ b/tdrs-backend/apt.yml @@ -4,6 +4,6 @@ keys: repos: - deb http://apt.postgresql.org/pub/repos/apt/ bookworm-pgdg main packages: - - postgresql-client-15 + - postgresql-client-12 - libjemalloc-dev - redis diff --git a/terraform/dev/main.tf b/terraform/dev/main.tf index da1df5b10..8e4b61c1a 100644 --- a/terraform/dev/main.tf +++ b/terraform/dev/main.tf @@ -52,6 +52,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-dev" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["micro-psql"] + json_params = '{"version": "12"}' recursive_delete = true } From 9faa5260762b3322b532c5ca8f95e9be71f69784 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 13:58:36 -0700 Subject: [PATCH 23/38] - escaping quotes --- terraform/dev/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/terraform/dev/main.tf b/terraform/dev/main.tf index 8e4b61c1a..e65d8e8c7 100644 --- a/terraform/dev/main.tf +++ b/terraform/dev/main.tf @@ -52,7 +52,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-dev" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["micro-psql"] - json_params = '{"version": "12"}' + json_params = "{\"version\": \"12\"}" recursive_delete = true } From b6069339e916cc98d20ab3d83c5c32a10fe47d6f Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 15:47:29 -0700 Subject: [PATCH 24/38] - forcing db 
name --- tdrs-backend/tdpservice/settings/cloudgov.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/settings/cloudgov.py b/tdrs-backend/tdpservice/settings/cloudgov.py index b00f76fa9..f01369cf0 100644 --- a/tdrs-backend/tdpservice/settings/cloudgov.py +++ b/tdrs-backend/tdpservice/settings/cloudgov.py @@ -74,7 +74,7 @@ class CloudGov(Common): if (cloudgov_space_suffix in ["prod", "staging"]): db_name = database_creds['db_name'] else: - db_name = env_based_db_name + db_name = 'cgawsbrokerprod2ej0zsae8fxqb7r' DATABASES = { 'default': { From 73a92a8a5f4973a6c725893e98ada4bc9d6ea77c Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Tue, 20 Feb 2024 16:04:01 -0700 Subject: [PATCH 25/38] Revert "- forcing db name" This reverts commit b6069339e916cc98d20ab3d83c5c32a10fe47d6f. --- tdrs-backend/tdpservice/settings/cloudgov.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/settings/cloudgov.py b/tdrs-backend/tdpservice/settings/cloudgov.py index f01369cf0..b00f76fa9 100644 --- a/tdrs-backend/tdpservice/settings/cloudgov.py +++ b/tdrs-backend/tdpservice/settings/cloudgov.py @@ -74,7 +74,7 @@ class CloudGov(Common): if (cloudgov_space_suffix in ["prod", "staging"]): db_name = database_creds['db_name'] else: - db_name = 'cgawsbrokerprod2ej0zsae8fxqb7r' + db_name = env_based_db_name DATABASES = { 'default': { From 447a774a140bcad1b8c56c608e9689f06cb826cb Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 21 Feb 2024 08:14:17 -0700 Subject: [PATCH 26/38] - logging - every 5 min --- tdrs-backend/tdpservice/scheduling/db_backup.py | 5 +++-- tdrs-backend/tdpservice/settings/common.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index b52bbeffb..32407c3b2 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -137,8 +137,9 @@ def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov- logger.info(f"Uploading {file_name} to S3.") s3_client = boto3.client('s3', region_name=sys_values['S3_REGION']) - s3_client.upload_file(file_name, bucket, object_name) - logger.info("Uploaded {} to S3:{}{}.".format(file_name, bucket, object_name)) + response = s3_client.upload_file(file_name, bucket, object_name) + logger.info(f"S3 upload response: {response}") + logger.info("Uploaded {} to s3://{}/{}.".format(file_name, bucket, object_name)) return True diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index c66013da6..2a85a6ab3 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -454,7 +454,7 @@ class Common(Configuration): CELERY_BEAT_SCHEDULE = { 'Database Backup': { 'task': 'tdpservice.scheduling.tasks.postgres_backup', - 'schedule': crontab(minute='*/10'), # Runs at midnight EST + 'schedule': crontab(minute='*/5'), # Runs at midnight EST 'args': "-b", 'options': { 'expires': 15.0, From b3f02458ed321b0a93115fed6eb697dbedde668e Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 21 Feb 2024 08:49:26 -0700 Subject: [PATCH 27/38] - more logging --- tdrs-backend/tdpservice/scheduling/db_backup.py | 15 ++++++++++++--- tdrs-backend/tdpservice/scheduling/tasks.py | 2 ++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 
32407c3b2..45e8fb130 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -89,6 +89,8 @@ def backup_database(file_name, logger.info(f"Executing backup command: {cmd}") os.system(cmd) logger.info("Wrote pg dumpfile to {}".format(file_name)) + file_size = os.path.getsize(file_name) + logger.info(f"Pg dumpfile size in bytes: {file_size}.") return True except Exception as e: logger.error(f"Caught Exception while backing up database. Exception: {e}") @@ -105,10 +107,12 @@ def restore_database(file_name, postgres_client, database_uri): DATABASE_DB_NAME] = get_database_credentials(database_uri) os.environ['PGPASSWORD'] = DATABASE_PASSWORD try: + logger.info("Begining database creation.") os.system(postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " + DATABASE_DB_NAME) + logger.info("Completed database creation.") except Exception as e: - print(e) + logger.error(f"Caught exception while creating the database. Exception: {e}.") return False # write .pgpass @@ -117,8 +121,10 @@ def restore_database(file_name, postgres_client, database_uri): os.environ['PGPASSFILE'] = '/home/vcap/.pgpass' os.system('chmod 0600 /home/vcap/.pgpass') + logger.info("Begining database restoration.") os.system(postgres_client + "pg_restore" + " -p " + DATABASE_PORT + " -h " + DATABASE_HOST + " -U " + DATABASE_USERNAME + " -d " + DATABASE_DB_NAME + " " + file_name) + logger.info("Completed database restoration.") return True @@ -157,9 +163,11 @@ def download_file(bucket, """ if object_name is None: object_name = os.path.basename(file_name) + logger.info("Begining download for backup file.") s3 = boto3.client('s3', region_name=region) - s3.download_file(bucket, object_name, file_name) - print("Downloaded s3 file {}{} to {}.".format(bucket, object_name, file_name)) + response = s3.download_file(bucket, object_name, file_name) + logger.info(f"Response from s3 download: {response}.") + logger.info("Downloaded s3 file {}/{} to {}.".format(bucket, object_name, file_name)) def list_s3_files(sys_values): @@ -243,6 +251,7 @@ def main(argv, sys_values): postgres_client=sys_values['POSTGRES_CLIENT_DIR'], database_uri=arg_database) + logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index bb9349784..24c394ddd 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -24,6 +24,8 @@ def postgres_backup(*args): result = run_backup(arg) if result: logger.info("Finished database backup.") + else: + logger.error("Failed to complete database backup.") return True @shared_task From 460005699e4030b9e302fecca847a9c7d6dba441 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 21 Feb 2024 08:59:41 -0700 Subject: [PATCH 28/38] - Cleanup debug code - update tf environments to force version 12 of pg server --- .circleci/build-and-test/jobs.yml | 120 +++++++++--------- .../tdpservice/scheduling/db_backup.py | 16 +-- tdrs-backend/tdpservice/scheduling/tasks.py | 2 +- tdrs-backend/tdpservice/settings/common.py | 2 +- terraform/production/main.tf | 1 + terraform/staging/main.tf | 1 + 6 files changed, 65 insertions(+), 77 deletions(-) diff --git a/.circleci/build-and-test/jobs.yml b/.circleci/build-and-test/jobs.yml index b9a0c54d4..5e58a99ae 100644 --- a/.circleci/build-and-test/jobs.yml +++ b/.circleci/build-and-test/jobs.yml @@ -3,78 +3,78 @@ executor: 
machine-executor steps: - checkout - # - docker-compose-check - # - docker-compose-up-with-elastic-backend - # - run: - # name: Run Unit Tests And Create Code Coverage Report - # command: | - # cd tdrs-backend; - # docker-compose run --rm web bash -c "./wait_for_services.sh && pytest --cov-report=xml" - # - run: - # name: Execute Python Linting Test - # command: cd tdrs-backend; docker-compose run --rm web bash -c "flake8 ." - # - upload-codecov: - # component: backend - # coverage-report: ./tdrs-backend/coverage.xml + - docker-compose-check + - docker-compose-up-with-elastic-backend + - run: + name: Run Unit Tests And Create Code Coverage Report + command: | + cd tdrs-backend; + docker-compose run --rm web bash -c "./wait_for_services.sh && pytest --cov-report=xml" + - run: + name: Execute Python Linting Test + command: cd tdrs-backend; docker-compose run --rm web bash -c "flake8 ." + - upload-codecov: + component: backend + coverage-report: ./tdrs-backend/coverage.xml test-frontend: executor: machine-executor working_directory: ~/tdp-apps steps: - checkout - # - install-nodejs-machine - # - disable-npm-audit - # - install-nodejs-packages: - # app-dir: tdrs-frontend - # - run: - # name: Run ESLint - # command: cd tdrs-frontend; npm run lint - # - run: - # name: Run Pa11y Accessibility Tests - # command: cd tdrs-frontend; mkdir pa11y-screenshots/; npm run test:accessibility - # - run: - # name: Run Jest Unit Tests - # command: cd tdrs-frontend; npm run test:ci - # - upload-codecov: - # component: frontend - # coverage-report: ./tdrs-frontend/coverage/lcov.info - # - store_artifacts: - # path: tdrs-frontend/pa11y-screenshots/ + - install-nodejs-machine + - disable-npm-audit + - install-nodejs-packages: + app-dir: tdrs-frontend + - run: + name: Run ESLint + command: cd tdrs-frontend; npm run lint + - run: + name: Run Pa11y Accessibility Tests + command: cd tdrs-frontend; mkdir pa11y-screenshots/; npm run test:accessibility + - run: + name: Run Jest Unit Tests + command: cd tdrs-frontend; npm run test:ci + - upload-codecov: + component: frontend + coverage-report: ./tdrs-frontend/coverage/lcov.info + - store_artifacts: + path: tdrs-frontend/pa11y-screenshots/ test-e2e: executor: large-machine-executor working_directory: ~/tdp-apps steps: - checkout - # - docker-compose-check - # - docker-compose-up-with-elastic-backend - # - docker-compose-up-frontend - # - install-nodejs-machine - # - disable-npm-audit - # - install-nodejs-packages: - # app-dir: tdrs-frontend - # - run: - # name: Wait for backend to become available - # command: cd tdrs-backend; docker-compose run --rm zaproxy bash -c \ - # "PATH=$PATH:/home/zap/.local/bin && - # pip install wait-for-it && - # wait-for-it --service http://web:8080 --timeout 180 -- echo \"Django is ready\"" - # - run: - # name: apply the migrations - # command: cd tdrs-backend; docker-compose exec web bash -c "python manage.py makemigrations; python manage.py migrate" - # - run: - # name: Remove existing cypress test users - # command: cd tdrs-backend; docker-compose exec web python manage.py delete_cypress_users -usernames new-cypress@teamraft.com cypress-admin@teamraft.com - # - run: - # name: Setup cypress test users - # command: cd tdrs-backend; docker-compose exec web python manage.py generate_cypress_users - # - run: - # name: Run Cypress e2e tests - # command: cd tdrs-frontend; npm run test:e2e-ci - # - store_artifacts: - # path: tdrs-frontend/cypress/screenshots/ - # - store_artifacts: - # path: tdrs-frontend/cypress/videos/ + - docker-compose-check + 
- docker-compose-up-with-elastic-backend + - docker-compose-up-frontend + - install-nodejs-machine + - disable-npm-audit + - install-nodejs-packages: + app-dir: tdrs-frontend + - run: + name: Wait for backend to become available + command: cd tdrs-backend; docker-compose run --rm zaproxy bash -c \ + "PATH=$PATH:/home/zap/.local/bin && + pip install wait-for-it && + wait-for-it --service http://web:8080 --timeout 180 -- echo \"Django is ready\"" + - run: + name: apply the migrations + command: cd tdrs-backend; docker-compose exec web bash -c "python manage.py makemigrations; python manage.py migrate" + - run: + name: Remove existing cypress test users + command: cd tdrs-backend; docker-compose exec web python manage.py delete_cypress_users -usernames new-cypress@teamraft.com cypress-admin@teamraft.com + - run: + name: Setup cypress test users + command: cd tdrs-backend; docker-compose exec web python manage.py generate_cypress_users + - run: + name: Run Cypress e2e tests + command: cd tdrs-frontend; npm run test:e2e-ci + - store_artifacts: + path: tdrs-frontend/cypress/screenshots/ + - store_artifacts: + path: tdrs-frontend/cypress/videos/ secrets-check: executor: docker-executor diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 45e8fb130..dfbef7e60 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -26,21 +26,7 @@ def get_system_values(): sys_values['SPACE'] = json.loads(OS_ENV['VCAP_APPLICATION'])['space_name'] # Postgres client pg_dump directory - pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], - stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) - pgdump_search.wait() - pg_dump_paths, pgdump_search_error = pgdump_search.communicate() - pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') - if pg_dump_paths[0] == '': - raise Exception("Postgres client is not found") - - logger.info(f"\n\nPG_DUMP PATHS: {pg_dump_paths}\n\n") - - for _ in pg_dump_paths: - if 'pg_dump' in str(_) and 'postgresql' in str(_): - sys_values['POSTGRES_CLIENT_DIR'] = _[:_.find('pg_dump')] - logger.info("Found PG client here: {}".format(_)) - # sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/bin/" + sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/apt/usr/lib/postgresql/12/bin/" sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] sys_values['S3_CREDENTIALS'] = sys_values['S3_ENV_VARS'][0]['credentials'] diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index 24c394ddd..cbb3c8f9f 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -26,7 +26,7 @@ def postgres_backup(*args): logger.info("Finished database backup.") else: logger.error("Failed to complete database backup.") - return True + return result @shared_task def check_for_accounts_needing_deactivation_warning(): diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index 2a85a6ab3..6fb4b33ba 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -454,7 +454,7 @@ class Common(Configuration): CELERY_BEAT_SCHEDULE = { 'Database Backup': { 'task': 'tdpservice.scheduling.tasks.postgres_backup', - 'schedule': crontab(minute='*/5'), # Runs at midnight EST + 'schedule': crontab(minute='0', hour='4'), # Runs at midnight EST 'args': "-b", 'options': { 'expires': 15.0, diff 
--git a/terraform/production/main.tf b/terraform/production/main.tf index 6948ecd72..9fb0e16ca 100644 --- a/terraform/production/main.tf +++ b/terraform/production/main.tf @@ -51,6 +51,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-prod" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["medium-psql"] + json_params = "{\"version\": \"12\"}" recursive_delete = true } diff --git a/terraform/staging/main.tf b/terraform/staging/main.tf index 7b6d45a1f..00186bb34 100644 --- a/terraform/staging/main.tf +++ b/terraform/staging/main.tf @@ -51,6 +51,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-staging" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["micro-psql"] + json_params = "{\"version\": \"12\"}" recursive_delete = true } From 2aeeac36fee0efbe70a7bad0edd9c57535312cd2 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 21 Feb 2024 09:10:10 -0700 Subject: [PATCH 29/38] - Fix lint --- tdrs-backend/tdpservice/scheduling/db_backup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index dfbef7e60..607cd8251 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -8,7 +8,6 @@ import json import os import sys -import subprocess from django.conf import settings import boto3 import logging From fcccf1a5c5f02344de0042a3483c80313511e784 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Thu, 22 Feb 2024 08:38:04 -0700 Subject: [PATCH 30/38] - Adding back client search if hardcoded path doesn't exist --- .../tdpservice/scheduling/db_backup.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 607cd8251..b725e852c 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -7,6 +7,7 @@ import getopt import json import os +import subprocess import sys from django.conf import settings import boto3 @@ -27,6 +28,25 @@ def get_system_values(): # Postgres client pg_dump directory sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/apt/usr/lib/postgresql/12/bin/" + # If the client directory and binaries don't exist, we need to find them. + if not (os.path.exists(sys_values['POSTGRES_CLIENT_DIR']) and + os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}/pg_dump")): + logger.warning(f"Couldn't find postgres client binaries at the hardcoded path: + {sys_values['POSTGRES_CLIENT_DIR']}. 
Searching OS for client directory.") + pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], + stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) + pgdump_search.wait() + pg_dump_paths, pgdump_search_error = pgdump_search.communicate() + pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') + if pg_dump_paths[0] == '': + raise Exception("Postgres client is not found") + + for _ in pg_dump_paths: + if 'pg_dump' in str(_) and 'postgresql' in str(_): + sys_values['POSTGRES_CLIENT'] = _[:_.find('pg_dump')] + + logger.info(f"Using postgres client at: {sys_values['POSTGRES_CLIENT_DIR']}") + sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] sys_values['S3_CREDENTIALS'] = sys_values['S3_ENV_VARS'][0]['credentials'] sys_values['S3_URI'] = sys_values['S3_CREDENTIALS']['uri'] From 70d2bdfb229b18b7200b1baced14478817daed3d Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Thu, 22 Feb 2024 08:53:36 -0700 Subject: [PATCH 31/38] - fix syntax error --- tdrs-backend/tdpservice/scheduling/db_backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index b725e852c..90c3cae42 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -31,8 +31,8 @@ def get_system_values(): # If the client directory and binaries don't exist, we need to find them. if not (os.path.exists(sys_values['POSTGRES_CLIENT_DIR']) and os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}/pg_dump")): - logger.warning(f"Couldn't find postgres client binaries at the hardcoded path: - {sys_values['POSTGRES_CLIENT_DIR']}. Searching OS for client directory.") + logger.warning(f"Couldn't find postgres client binaries at the hardcoded path: " + "{sys_values['POSTGRES_CLIENT_DIR']}. Searching OS for client directory.") pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) pgdump_search.wait() From 9c697167625ff4009828d9c8a5323d11deef2558 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Thu, 22 Feb 2024 09:16:07 -0700 Subject: [PATCH 32/38] - fix lint --- tdrs-backend/tdpservice/scheduling/db_backup.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 90c3cae42..674600514 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -31,10 +31,10 @@ def get_system_values(): # If the client directory and binaries don't exist, we need to find them. if not (os.path.exists(sys_values['POSTGRES_CLIENT_DIR']) and os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}/pg_dump")): - logger.warning(f"Couldn't find postgres client binaries at the hardcoded path: " - "{sys_values['POSTGRES_CLIENT_DIR']}. Searching OS for client directory.") + logger.warning("Couldn't find postgres client binaries at the hardcoded path: " + f"{sys_values['POSTGRES_CLIENT_DIR']}. 
Searching OS for client directory.") pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], - stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) + stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) pgdump_search.wait() pg_dump_paths, pgdump_search_error = pgdump_search.communicate() pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') @@ -44,7 +44,7 @@ def get_system_values(): for _ in pg_dump_paths: if 'pg_dump' in str(_) and 'postgresql' in str(_): sys_values['POSTGRES_CLIENT'] = _[:_.find('pg_dump')] - + logger.info(f"Using postgres client at: {sys_values['POSTGRES_CLIENT_DIR']}") sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] From d597f4fb92312bdd95c48a6ff9d8413a315c16b8 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Thu, 22 Feb 2024 09:16:44 -0700 Subject: [PATCH 33/38] - remove extra slash --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 674600514..28cffd9df 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -30,7 +30,7 @@ def get_system_values(): # If the client directory and binaries don't exist, we need to find them. if not (os.path.exists(sys_values['POSTGRES_CLIENT_DIR']) and - os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}/pg_dump")): + os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}pg_dump")): logger.warning("Couldn't find postgres client binaries at the hardcoded path: " f"{sys_values['POSTGRES_CLIENT_DIR']}. Searching OS for client directory.") pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], From 57fd3841d626fda7fa1d0a0faafcb8b38ba19dde Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Mon, 26 Feb 2024 11:06:45 -0700 Subject: [PATCH 34/38] - Adding log entries to backup task --- .../tdpservice/scheduling/db_backup.py | 154 +++++++++++++++--- 1 file changed, 131 insertions(+), 23 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 28cffd9df..8ff11e48c 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -12,12 +12,15 @@ from django.conf import settings import boto3 import logging +from tdpservice.users.models import User +from django.contrib.admin.models import ADDITION, ContentType, LogEntry logger = logging.getLogger(__name__) OS_ENV = os.environ +content_type = ContentType.objects.get_for_model(LogEntry) def get_system_values(): """Return dict of keys and settings to use whether local or deployed.""" @@ -81,7 +84,8 @@ def get_system_values(): def backup_database(file_name, postgres_client, - database_uri): + database_uri, + system_user): """Back up postgres database into file. :param file_name: back up file name @@ -93,7 +97,16 @@ def backup_database(file_name, cmd = postgres_client + "pg_dump -Fc --no-acl -f " + file_name + " -d " + database_uri logger.info(f"Executing backup command: {cmd}") os.system(cmd) - logger.info("Wrote pg dumpfile to {}".format(file_name)) + msg = "Successfully executed backup. 
Wrote pg dumpfile to {}".format(file_name) + logger.info(msg) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database Backup", + action_flag=ADDITION, + change_message=msg + ) file_size = os.path.getsize(file_name) logger.info(f"Pg dumpfile size in bytes: {file_size}.") return True @@ -102,7 +115,7 @@ def backup_database(file_name, raise e -def restore_database(file_name, postgres_client, database_uri): +def restore_database(file_name, postgres_client, database_uri, system_user): """Restore the database from filename. :param file_name: database backup filename @@ -113,9 +126,20 @@ def restore_database(file_name, postgres_client, database_uri): os.environ['PGPASSWORD'] = DATABASE_PASSWORD try: logger.info("Begining database creation.") - os.system(postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " - + DATABASE_DB_NAME) - logger.info("Completed database creation.") + cmd = (postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " + + DATABASE_DB_NAME) + logger.info(f"Executing create command: {cmd}") + os.system(cmd) + msg = "Completed database creation." + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database create", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) except Exception as e: logger.error(f"Caught exception while creating the database. Exception: {e}.") return False @@ -127,13 +151,24 @@ def restore_database(file_name, postgres_client, database_uri): os.system('chmod 0600 /home/vcap/.pgpass') logger.info("Begining database restoration.") - os.system(postgres_client + "pg_restore" + " -p " + DATABASE_PORT + " -h " + - DATABASE_HOST + " -U " + DATABASE_USERNAME + " -d " + DATABASE_DB_NAME + " " + file_name) - logger.info("Completed database restoration.") + cmd = (postgres_client + "pg_restore" + " -p " + DATABASE_PORT + " -h " + + DATABASE_HOST + " -U " + DATABASE_USERNAME + " -d " + DATABASE_DB_NAME + " " + file_name) + logger.info(f"Executing restore command: {cmd}") + os.system(cmd) + msg = "Completed database restoration." + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database restore", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) return True -def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov-west-1'): +def upload_file(file_name, bucket, sys_values, system_user, object_name=None, region='us-gov-west-1'): """Upload a file to an S3 bucket. 
:param file_name: file name being uploaded to s3 bucket @@ -148,15 +183,24 @@ def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov- logger.info(f"Uploading {file_name} to S3.") s3_client = boto3.client('s3', region_name=sys_values['S3_REGION']) - response = s3_client.upload_file(file_name, bucket, object_name) - logger.info(f"S3 upload response: {response}") - logger.info("Uploaded {} to s3://{}/{}.".format(file_name, bucket, object_name)) + s3_client.upload_file(file_name, bucket, object_name) + msg = "Successfully uploaded {} to s3://{}/{}.".format(file_name, bucket, object_name) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed database backup S3 upload", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) return True def download_file(bucket, file_name, region, + system_user, object_name=None, ): """Download file from s3 bucket.""" @@ -170,9 +214,17 @@ def download_file(bucket, object_name = os.path.basename(file_name) logger.info("Begining download for backup file.") s3 = boto3.client('s3', region_name=region) - response = s3.download_file(bucket, object_name, file_name) - logger.info(f"Response from s3 download: {response}.") - logger.info("Downloaded s3 file {}/{} to {}.".format(bucket, object_name, file_name)) + s3.download_file(bucket, object_name, file_name) + msg = "Successfully downloaded s3 file {}/{} to {}.".format(bucket, object_name, file_name) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed database backup S3 download", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) def list_s3_files(sys_values): @@ -207,7 +259,7 @@ def get_database_credentials(database_uri): return [username, password, host, port, database_name] -def main(argv, sys_values): +def main(argv, sys_values, system_user): """Handle commandline args.""" arg_file = "/tmp/backup.pg" arg_database = sys_values['DATABASE_URI'] @@ -230,31 +282,73 @@ def main(argv, sys_values): raise e if arg_to_backup: + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Begining Database Backup", + action_flag=ADDITION, + change_message="Begining database backup." + ) # back up database backup_database(file_name=arg_file, postgres_client=sys_values['POSTGRES_CLIENT_DIR'], - database_uri=arg_database) + database_uri=arg_database, + system_user=system_user) # upload backup file upload_file(file_name=arg_file, bucket=sys_values['S3_BUCKET'], sys_values=sys_values, + system_user=system_user, region=sys_values['S3_REGION'], - object_name="backup"+arg_file) + object_name="backup"+arg_file, + ) + + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Finished Database Backup", + action_flag=ADDITION, + change_message="Finished database backup." + ) + logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) elif arg_to_restore: + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Begining Database Restore", + action_flag=ADDITION, + change_message="Begining database restore." 
+ ) + # download file from s3 download_file(bucket=sys_values['S3_BUCKET'], file_name=arg_file, region=sys_values['S3_REGION'], - object_name="backup"+arg_file) + system_user=system_user, + object_name="backup"+arg_file, + ) # restore database restore_database(file_name=arg_file, postgres_client=sys_values['POSTGRES_CLIENT_DIR'], - database_uri=arg_database) + database_uri=arg_database, + system_user=system_user) + + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Finished Database Restore", + action_flag=ADDITION, + change_message="Finished database restore." + ) logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) @@ -266,12 +360,26 @@ def run_backup(arg): logger.info("Won't backup locally") else: try: - main([arg], sys_values=get_system_values()) + system_user, created = User.objects.get_or_create(username='system') + if created: + logger.debug('Created reserved system user.') + main([arg], sys_values=get_system_values(), system_user=system_user) except Exception as e: logger.error(f"Caught Exception in run_backup. Exception: {e}.") + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Exception in run_backup", + action_flag=ADDITION, + change_message=str(e) + ) return False return True if __name__ == '__main__': - main(sys.argv[1:], get_system_values()) + system_user, created = User.objects.get_or_create(username='system') + if created: + logger.debug('Created reserved system user.') + main(sys.argv[1:], get_system_values(), system_user) From 806569b5859440217f6e03828359aee654c60162 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Mon, 26 Feb 2024 12:26:21 -0700 Subject: [PATCH 35/38] - Moving DB task to it's own file --- .../tdpservice/scheduling/db_tasks.py | 21 +++++++++++++++++++ tdrs-backend/tdpservice/scheduling/tasks.py | 14 +------------ tdrs-backend/tdpservice/settings/common.py | 2 +- 3 files changed, 23 insertions(+), 14 deletions(-) create mode 100644 tdrs-backend/tdpservice/scheduling/db_tasks.py diff --git a/tdrs-backend/tdpservice/scheduling/db_tasks.py b/tdrs-backend/tdpservice/scheduling/db_tasks.py new file mode 100644 index 000000000..24690d8c8 --- /dev/null +++ b/tdrs-backend/tdpservice/scheduling/db_tasks.py @@ -0,0 +1,21 @@ +"""Shared celery database tasks file for beat.""" + +from __future__ import absolute_import +from celery import shared_task +import logging +from .db_backup import run_backup + +logger = logging.getLogger(__name__) + +@shared_task +def postgres_backup(*args): + """Run nightly postgres backup.""" + arg = ''.join(args) + logger.debug("postgres_backup::run_backup() run with arg: " + arg) + logger.info("Begining database backup.") + result = run_backup(arg) + if result: + logger.info("Finished database backup.") + else: + logger.error("Failed to complete database backup.") + return result diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index cbb3c8f9f..913f28cd5 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -11,22 +11,10 @@ import logging from tdpservice.email.helpers.account_access_requests import send_num_access_requests_email from tdpservice.email.helpers.account_deactivation_warning import send_deactivation_warning_email -from .db_backup import run_backup + logger = logging.getLogger(__name__) -@shared_task -def postgres_backup(*args): - """Run nightly 
postgres backup.""" - arg = ''.join(args) - logger.debug("postgres_backup::run_backup() run with arg: " + arg) - logger.info("Begining database backup.") - result = run_backup(arg) - if result: - logger.info("Finished database backup.") - else: - logger.error("Failed to complete database backup.") - return result @shared_task def check_for_accounts_needing_deactivation_warning(): diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index 6fb4b33ba..f909cff42 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -453,7 +453,7 @@ class Common(Configuration): CELERY_BEAT_SCHEDULE = { 'Database Backup': { - 'task': 'tdpservice.scheduling.tasks.postgres_backup', + 'task': 'tdpservice.scheduling.db_tasks.postgres_backup', 'schedule': crontab(minute='0', hour='4'), # Runs at midnight EST 'args': "-b", 'options': { From 9521b3ba381366250a8bacbbaccd3ec8ee4cd334 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Mon, 26 Feb 2024 14:02:52 -0700 Subject: [PATCH 36/38] - fix lint --- tdrs-backend/tdpservice/scheduling/db_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index 8ff11e48c..05f51ad09 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -127,7 +127,7 @@ def restore_database(file_name, postgres_client, database_uri, system_user): try: logger.info("Begining database creation.") cmd = (postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " - + DATABASE_DB_NAME) + + DATABASE_DB_NAME) logger.info(f"Executing create command: {cmd}") os.system(cmd) msg = "Completed database creation." 
From edd06b8daf2c3310cdf1e626b64ff0e2e9cfcd9b Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 28 Feb 2024 08:38:01 -0700 Subject: [PATCH 37/38] - Seperate out email tasks - update crontabs --- tdrs-backend/tdpservice/email/tasks.py | 71 +++++++++++++++++ .../tdpservice/scheduling/db_tasks.py | 21 ----- tdrs-backend/tdpservice/scheduling/tasks.py | 76 ++++--------------- .../test/test_user_deactivation_warning.py | 2 +- tdrs-backend/tdpservice/settings/common.py | 6 +- 5 files changed, 88 insertions(+), 88 deletions(-) create mode 100644 tdrs-backend/tdpservice/email/tasks.py delete mode 100644 tdrs-backend/tdpservice/scheduling/db_tasks.py diff --git a/tdrs-backend/tdpservice/email/tasks.py b/tdrs-backend/tdpservice/email/tasks.py new file mode 100644 index 000000000..3da03d93d --- /dev/null +++ b/tdrs-backend/tdpservice/email/tasks.py @@ -0,0 +1,71 @@ +"""Shared celery email tasks for beat.""" + +from __future__ import absolute_import +from tdpservice.users.models import User, AccountApprovalStatusChoices +from django.contrib.auth.models import Group +from django.conf import settings +from django.urls import reverse +from django.utils import timezone +from celery import shared_task +from datetime import datetime, timedelta +import logging +from tdpservice.email.helpers.account_access_requests import send_num_access_requests_email +from tdpservice.email.helpers.account_deactivation_warning import send_deactivation_warning_email + + +logger = logging.getLogger(__name__) + + +@shared_task +def check_for_accounts_needing_deactivation_warning(): + """Check for accounts that need deactivation warning emails.""" + deactivate_in_10_days = users_to_deactivate(10) + deactivate_in_3_days = users_to_deactivate(3) + deactivate_in_1_day = users_to_deactivate(1) + + if deactivate_in_10_days: + send_deactivation_warning_email(deactivate_in_10_days, 10) + if deactivate_in_3_days: + send_deactivation_warning_email(deactivate_in_3_days, 3) + if deactivate_in_1_day: + send_deactivation_warning_email(deactivate_in_1_day, 1) + +def users_to_deactivate(days): + """Return a list of users that have not logged in in the last {180 - days} days.""" + days = 180 - days + return User.objects.filter( + last_login__lte=datetime.now(tz=timezone.utc) - timedelta(days=days), + last_login__gte=datetime.now(tz=timezone.utc) - timedelta(days=days+1), + account_approval_status=AccountApprovalStatusChoices.APPROVED, + ) + +def get_ofa_admin_user_emails(): + """Return a list of OFA System Admin and OFA Admin users.""" + return User.objects.filter( + groups__in=Group.objects.filter(name__in=('OFA Admin', 'OFA System Admin')) + ).values_list('email', flat=True).distinct() + +def get_num_access_requests(): + """Return the number of users requesting access.""" + return User.objects.filter( + account_approval_status=AccountApprovalStatusChoices.ACCESS_REQUEST, + ).count() + +@shared_task +def email_admin_num_access_requests(): + """Send all OFA System Admins an email with how many users have requested access.""" + recipient_email = get_ofa_admin_user_emails() + text_message = '' + subject = 'Number of Active Access Requests' + url = f'{settings.FRONTEND_BASE_URL}{reverse("admin:users_user_changelist")}?o=-2' + email_context = { + 'date': datetime.today(), + 'num_requests': get_num_access_requests(), + 'admin_user_pg': url, + } + + send_num_access_requests_email(recipient_email, + text_message, + subject, + email_context, + ) diff --git a/tdrs-backend/tdpservice/scheduling/db_tasks.py 
b/tdrs-backend/tdpservice/scheduling/db_tasks.py deleted file mode 100644 index 24690d8c8..000000000 --- a/tdrs-backend/tdpservice/scheduling/db_tasks.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Shared celery database tasks file for beat.""" - -from __future__ import absolute_import -from celery import shared_task -import logging -from .db_backup import run_backup - -logger = logging.getLogger(__name__) - -@shared_task -def postgres_backup(*args): - """Run nightly postgres backup.""" - arg = ''.join(args) - logger.debug("postgres_backup::run_backup() run with arg: " + arg) - logger.info("Begining database backup.") - result = run_backup(arg) - if result: - logger.info("Finished database backup.") - else: - logger.error("Failed to complete database backup.") - return result diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index 913f28cd5..24690d8c8 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -1,71 +1,21 @@ -"""Shared celery tasks file for beat.""" +"""Shared celery database tasks file for beat.""" from __future__ import absolute_import -from tdpservice.users.models import User, AccountApprovalStatusChoices -from django.contrib.auth.models import Group -from django.conf import settings -from django.urls import reverse -from django.utils import timezone from celery import shared_task -from datetime import datetime, timedelta import logging -from tdpservice.email.helpers.account_access_requests import send_num_access_requests_email -from tdpservice.email.helpers.account_deactivation_warning import send_deactivation_warning_email - +from .db_backup import run_backup logger = logging.getLogger(__name__) - @shared_task -def check_for_accounts_needing_deactivation_warning(): - """Check for accounts that need deactivation warning emails.""" - deactivate_in_10_days = users_to_deactivate(10) - deactivate_in_3_days = users_to_deactivate(3) - deactivate_in_1_day = users_to_deactivate(1) - - if deactivate_in_10_days: - send_deactivation_warning_email(deactivate_in_10_days, 10) - if deactivate_in_3_days: - send_deactivation_warning_email(deactivate_in_3_days, 3) - if deactivate_in_1_day: - send_deactivation_warning_email(deactivate_in_1_day, 1) - -def users_to_deactivate(days): - """Return a list of users that have not logged in in the last {180 - days} days.""" - days = 180 - days - return User.objects.filter( - last_login__lte=datetime.now(tz=timezone.utc) - timedelta(days=days), - last_login__gte=datetime.now(tz=timezone.utc) - timedelta(days=days+1), - account_approval_status=AccountApprovalStatusChoices.APPROVED, - ) - -def get_ofa_admin_user_emails(): - """Return a list of OFA System Admin and OFA Admin users.""" - return User.objects.filter( - groups__in=Group.objects.filter(name__in=('OFA Admin', 'OFA System Admin')) - ).values_list('email', flat=True).distinct() - -def get_num_access_requests(): - """Return the number of users requesting access.""" - return User.objects.filter( - account_approval_status=AccountApprovalStatusChoices.ACCESS_REQUEST, - ).count() - -@shared_task -def email_admin_num_access_requests(): - """Send all OFA System Admins an email with how many users have requested access.""" - recipient_email = get_ofa_admin_user_emails() - text_message = '' - subject = 'Number of Active Access Requests' - url = f'{settings.FRONTEND_BASE_URL}{reverse("admin:users_user_changelist")}?o=-2' - email_context = { - 'date': datetime.today(), - 'num_requests': 
get_num_access_requests(), - 'admin_user_pg': url, - } - - send_num_access_requests_email(recipient_email, - text_message, - subject, - email_context, - ) +def postgres_backup(*args): + """Run nightly postgres backup.""" + arg = ''.join(args) + logger.debug("postgres_backup::run_backup() run with arg: " + arg) + logger.info("Begining database backup.") + result = run_backup(arg) + if result: + logger.info("Finished database backup.") + else: + logger.error("Failed to complete database backup.") + return result diff --git a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py index 5a3897a8e..5cfdea507 100644 --- a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py +++ b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py @@ -4,7 +4,7 @@ import pytest import tdpservice from datetime import datetime, timedelta -from tdpservice.scheduling.tasks import check_for_accounts_needing_deactivation_warning +from tdpservice.email.tasks import check_for_accounts_needing_deactivation_warning from tdpservice.users.models import AccountApprovalStatusChoices import logging diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index f909cff42..6dc076e22 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -453,7 +453,7 @@ class Common(Configuration): CELERY_BEAT_SCHEDULE = { 'Database Backup': { - 'task': 'tdpservice.scheduling.db_tasks.postgres_backup', + 'task': 'tdpservice.scheduling.tasks.postgres_backup', 'schedule': crontab(minute='0', hour='4'), # Runs at midnight EST 'args': "-b", 'options': { @@ -461,7 +461,7 @@ class Common(Configuration): }, }, 'Account Deactivation Warning': { - 'task': 'tdpservice.scheduling.tasks.check_for_accounts_needing_deactivation_warning', + 'task': 'tdpservice.email.tasks.check_for_accounts_needing_deactivation_warning', 'schedule': crontab(day_of_week='*', hour='13', minute='0'), # Every day at 1pm UTC (9am EST) 'options': { @@ -469,7 +469,7 @@ class Common(Configuration): }, }, 'Email Admin Number of Access Requests' : { - 'task': 'tdpservice.scheduling.tasks.email_admin_num_access_requests', + 'task': 'tdpservice.email.tasks.email_admin_num_access_requests', 'schedule': crontab(minute='0', hour='1', day_of_week='*', day_of_month='*', month_of_year='*'), # Every day at 1am UTC (9pm EST) } } From 2f6d998572d7327bd6abf6bb57deecf33fe12753 Mon Sep 17 00:00:00 2001 From: Eric Lipe Date: Wed, 28 Feb 2024 08:54:38 -0700 Subject: [PATCH 38/38] - update tests --- .../scheduling/test/test_user_deactivation_warning.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py index 5cfdea507..369688d1a 100644 --- a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py +++ b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py @@ -19,7 +19,7 @@ def test_deactivation_email_10_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -38,7 +38,7 @@ def test_deactivation_email_3_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 
'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -57,7 +57,7 @@ def test_deactivation_email_1_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -77,7 +77,7 @@ def test_no_users_to_warn(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -94,5 +94,5 @@ def test_users_to_deactivate(user): user.first_name = 'UniqueName' user.account_approval_status = AccountApprovalStatusChoices.APPROVED user.save() - users = tdpservice.scheduling.tasks.users_to_deactivate(10) + users = tdpservice.email.tasks.users_to_deactivate(10) assert users[0].first_name == user.first_name
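Note: because CELERY_BEAT_SCHEDULE references tasks by dotted path, moving postgres_backup and the email tasks between modules (as the patches above do) can leave beat pointing at a stale path. A minimal sketch of a guard test (hypothetical, not part of this patch set; assumes the project's Django settings are loaded as in its existing pytest setup):

    from importlib import import_module

    from django.conf import settings

    def test_celery_beat_tasks_resolve():
        """Every CELERY_BEAT_SCHEDULE entry should point at an importable callable."""
        for name, entry in settings.CELERY_BEAT_SCHEDULE.items():
            module_path, attr = entry['task'].rsplit('.', 1)
            module = import_module(module_path)
            assert hasattr(module, attr), f"{name} references missing task {entry['task']}"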