diff --git a/tdrs-backend/tdpservice/email/tasks.py b/tdrs-backend/tdpservice/email/tasks.py new file mode 100644 index 000000000..3da03d93d --- /dev/null +++ b/tdrs-backend/tdpservice/email/tasks.py @@ -0,0 +1,71 @@ +"""Shared celery email tasks for beat.""" + +from __future__ import absolute_import +from tdpservice.users.models import User, AccountApprovalStatusChoices +from django.contrib.auth.models import Group +from django.conf import settings +from django.urls import reverse +from django.utils import timezone +from celery import shared_task +from datetime import datetime, timedelta +import logging +from tdpservice.email.helpers.account_access_requests import send_num_access_requests_email +from tdpservice.email.helpers.account_deactivation_warning import send_deactivation_warning_email + + +logger = logging.getLogger(__name__) + + +@shared_task +def check_for_accounts_needing_deactivation_warning(): + """Check for accounts that need deactivation warning emails.""" + deactivate_in_10_days = users_to_deactivate(10) + deactivate_in_3_days = users_to_deactivate(3) + deactivate_in_1_day = users_to_deactivate(1) + + if deactivate_in_10_days: + send_deactivation_warning_email(deactivate_in_10_days, 10) + if deactivate_in_3_days: + send_deactivation_warning_email(deactivate_in_3_days, 3) + if deactivate_in_1_day: + send_deactivation_warning_email(deactivate_in_1_day, 1) + +def users_to_deactivate(days): + """Return a list of users that have not logged in in the last {180 - days} days.""" + days = 180 - days + return User.objects.filter( + last_login__lte=datetime.now(tz=timezone.utc) - timedelta(days=days), + last_login__gte=datetime.now(tz=timezone.utc) - timedelta(days=days+1), + account_approval_status=AccountApprovalStatusChoices.APPROVED, + ) + +def get_ofa_admin_user_emails(): + """Return a list of OFA System Admin and OFA Admin users.""" + return User.objects.filter( + groups__in=Group.objects.filter(name__in=('OFA Admin', 'OFA System Admin')) + 
).values_list('email', flat=True).distinct() + +def get_num_access_requests(): + """Return the number of users requesting access.""" + return User.objects.filter( + account_approval_status=AccountApprovalStatusChoices.ACCESS_REQUEST, + ).count() + +@shared_task +def email_admin_num_access_requests(): + """Send all OFA System Admins an email with how many users have requested access.""" + recipient_email = get_ofa_admin_user_emails() + text_message = '' + subject = 'Number of Active Access Requests' + url = f'{settings.FRONTEND_BASE_URL}{reverse("admin:users_user_changelist")}?o=-2' + email_context = { + 'date': datetime.today(), + 'num_requests': get_num_access_requests(), + 'admin_user_pg': url, + } + + send_num_access_requests_email(recipient_email, + text_message, + subject, + email_context, + ) diff --git a/tdrs-backend/tdpservice/scheduling/db_backup.py b/tdrs-backend/tdpservice/scheduling/db_backup.py index ffcce9f22..05f51ad09 100644 --- a/tdrs-backend/tdpservice/scheduling/db_backup.py +++ b/tdrs-backend/tdpservice/scheduling/db_backup.py @@ -12,12 +12,15 @@ from django.conf import settings import boto3 import logging +from tdpservice.users.models import User +from django.contrib.admin.models import ADDITION, ContentType, LogEntry logger = logging.getLogger(__name__) OS_ENV = os.environ +content_type = ContentType.objects.get_for_model(LogEntry) def get_system_values(): """Return dict of keys and settings to use whether local or deployed.""" @@ -26,18 +29,26 @@ def get_system_values(): sys_values['SPACE'] = json.loads(OS_ENV['VCAP_APPLICATION'])['space_name'] # Postgres client pg_dump directory - pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], - stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) - pgdump_search.wait() - pg_dump_paths, pgdump_search_error = pgdump_search.communicate() - pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') - if pg_dump_paths[0] == '': - raise Exception("Postgres client is not found") - - for _ in 
pg_dump_paths: - if 'pg_dump' in str(_) and 'postgresql' in str(_): - sys_values['POSTGRES_CLIENT'] = _[:_.find('pg_dump')] - print("Found PG client here: {}".format(_)) + sys_values['POSTGRES_CLIENT_DIR'] = "/home/vcap/deps/0/apt/usr/lib/postgresql/12/bin/" + + # If the client directory and binaries don't exist, we need to find them. + if not (os.path.exists(sys_values['POSTGRES_CLIENT_DIR']) and + os.path.isfile(f"{sys_values['POSTGRES_CLIENT_DIR']}pg_dump")): + logger.warning("Couldn't find postgres client binaries at the hardcoded path: " + f"{sys_values['POSTGRES_CLIENT_DIR']}. Searching OS for client directory.") + pgdump_search = subprocess.Popen(["find", "/", "-iname", "pg_dump"], + stderr=subprocess.DEVNULL, stdout=subprocess.PIPE) + pgdump_search.wait() + pg_dump_paths, pgdump_search_error = pgdump_search.communicate() + pg_dump_paths = pg_dump_paths.decode("utf-8").split('\n') + if pg_dump_paths[0] == '': + raise Exception("Postgres client is not found") + + for _ in pg_dump_paths: + if 'pg_dump' in str(_) and 'postgresql' in str(_): + sys_values['POSTGRES_CLIENT_DIR'] = _[:_.find('pg_dump')] + + logger.info(f"Using postgres client at: {sys_values['POSTGRES_CLIENT_DIR']}") sys_values['S3_ENV_VARS'] = json.loads(OS_ENV['VCAP_SERVICES'])['s3'] sys_values['S3_CREDENTIALS'] = sys_values['S3_ENV_VARS'][0]['credentials'] @@ -73,7 +84,8 @@ def get_system_values(): def backup_database(file_name, postgres_client, - database_uri): + database_uri, + system_user): """Back up postgres database into file.
:param file_name: back up file name @@ -82,15 +94,28 @@ pg_dump -F c --no-acl --no-owner -f backup.pg postgresql://${USERNAME}:${PASSWORD}@${HOST}:${PORT}/${NAME} """ try: - os.system(postgres_client + "pg_dump -Fc --no-acl -f " + file_name + " -d " + database_uri) - print("Wrote pg dumpfile to {}".format(file_name)) + cmd = postgres_client + "pg_dump -Fc --no-acl -f " + file_name + " -d " + database_uri + logger.info(f"Executing backup command: {cmd}") + os.system(cmd) + msg = "Successfully executed backup. Wrote pg dumpfile to {}".format(file_name) + logger.info(msg) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database Backup", + action_flag=ADDITION, + change_message=msg + ) + file_size = os.path.getsize(file_name) + logger.info(f"Pg dumpfile size in bytes: {file_size}.") + return True except Exception as e: - print(e) - return False + logger.error(f"Caught Exception while backing up database. Exception: {e}") + raise e -def restore_database(file_name, postgres_client, database_uri): +def restore_database(file_name, postgres_client, database_uri, system_user): """Restore the database from filename. :param file_name: database backup filename @@ -100,10 +125,23 @@ def restore_database(file_name, postgres_client, database_uri): DATABASE_DB_NAME] = get_database_credentials(database_uri) os.environ['PGPASSWORD'] = DATABASE_PASSWORD try: - os.system(postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " - + DATABASE_DB_NAME) + logger.info("Beginning database creation.") + cmd = (postgres_client + "createdb " + "-U " + DATABASE_USERNAME + " -h " + DATABASE_HOST + " -T template0 " + + DATABASE_DB_NAME) + logger.info(f"Executing create command: {cmd}") + os.system(cmd) + msg = "Completed database creation."
+ LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database create", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) except Exception as e: - print(e) + logger.error(f"Caught exception while creating the database. Exception: {e}.") return False # write .pgpass @@ -112,12 +150,25 @@ os.environ['PGPASSFILE'] = '/home/vcap/.pgpass' os.system('chmod 0600 /home/vcap/.pgpass') - os.system(postgres_client + "pg_restore" + " -p " + DATABASE_PORT + " -h " + - DATABASE_HOST + " -U " + DATABASE_USERNAME + " -d " + DATABASE_DB_NAME + " " + file_name) + logger.info("Beginning database restoration.") + cmd = (postgres_client + "pg_restore" + " -p " + DATABASE_PORT + " -h " + + DATABASE_HOST + " -U " + DATABASE_USERNAME + " -d " + DATABASE_DB_NAME + " " + file_name) + logger.info(f"Executing restore command: {cmd}") + os.system(cmd) + msg = "Completed database restoration." + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed Database restore", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) return True -def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov-west-1'): +def upload_file(file_name, bucket, sys_values, system_user, object_name=None, region='us-gov-west-1'): """Upload a file to an S3 bucket.
:param file_name: file name being uploaded to s3 bucket @@ -129,16 +180,27 @@ def upload_file(file_name, bucket, sys_values, object_name=None, region='us-gov- if object_name is None: object_name = os.path.basename(file_name) + logger.info(f"Uploading {file_name} to S3.") s3_client = boto3.client('s3', region_name=sys_values['S3_REGION']) s3_client.upload_file(file_name, bucket, object_name) - print("Uploaded {} to S3:{}{}".format(file_name, bucket, object_name)) + msg = "Successfully uploaded {} to s3://{}/{}.".format(file_name, bucket, object_name) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed database backup S3 upload", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) return True def download_file(bucket, file_name, region, + system_user, object_name=None, ): """Download file from s3 bucket.""" @@ -150,9 +212,19 @@ """ if object_name is None: object_name = os.path.basename(file_name) + logger.info("Beginning download for backup file.") s3 = boto3.client('s3', region_name=region) s3.download_file(bucket, object_name, file_name) - print("Downloaded s3 file {}{} to {}.".format(bucket, object_name, file_name)) + msg = "Successfully downloaded s3 file {}/{} to {}.".format(bucket, object_name, file_name) + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Executed database backup S3 download", + action_flag=ADDITION, + change_message=msg + ) + logger.info(msg) def list_s3_files(sys_values): @@ -187,7 +259,7 @@ def get_database_credentials(database_uri): return [username, password, host, port, database_name] -def main(argv, sys_values): +def main(argv, sys_values, system_user): """Handle commandline args.""" arg_file = "/tmp/backup.pg" arg_database = sys_values['DATABASE_URI'] @@ -210,31 +282,75 @@ def main(argv, sys_values): raise e if arg_to_backup: +
LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Beginning Database Backup", + action_flag=ADDITION, + change_message="Beginning database backup." + ) # back up database backup_database(file_name=arg_file, - postgres_client=sys_values['POSTGRES_CLIENT'], - database_uri=arg_database) + postgres_client=sys_values['POSTGRES_CLIENT_DIR'], + database_uri=arg_database, + system_user=system_user) # upload backup file upload_file(file_name=arg_file, bucket=sys_values['S3_BUCKET'], sys_values=sys_values, + system_user=system_user, region=sys_values['S3_REGION'], - object_name="backup"+arg_file) + object_name="backup"+arg_file, + ) + + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Finished Database Backup", + action_flag=ADDITION, + change_message="Finished database backup." + ) + + logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) elif arg_to_restore: + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Beginning Database Restore", + action_flag=ADDITION, + change_message="Beginning database restore." + ) + # download file from s3 download_file(bucket=sys_values['S3_BUCKET'], file_name=arg_file, region=sys_values['S3_REGION'], - object_name="backup"+arg_file) + system_user=system_user, + object_name="backup"+arg_file, + ) # restore database restore_database(file_name=arg_file, - postgres_client=sys_values['POSTGRES_CLIENT'], - database_uri=arg_database) - + postgres_client=sys_values['POSTGRES_CLIENT_DIR'], + database_uri=arg_database, + system_user=system_user) + + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Finished Database Restore", + action_flag=ADDITION, + change_message="Finished database restore."
+ ) + + logger.info(f"Deleting {arg_file} from local storage.") os.system('rm ' + arg_file) @@ -243,8 +359,27 @@ def run_backup(arg): if settings.USE_LOCALSTACK is True: logger.info("Won't backup locally") else: - main([arg], sys_values=get_system_values()) + try: + system_user, created = User.objects.get_or_create(username='system') + if created: + logger.debug('Created reserved system user.') + main([arg], sys_values=get_system_values(), system_user=system_user) + except Exception as e: + logger.error(f"Caught Exception in run_backup. Exception: {e}.") + LogEntry.objects.log_action( + user_id=system_user.pk, + content_type_id=content_type.pk, + object_id=None, + object_repr="Exception in run_backup", + action_flag=ADDITION, + change_message=str(e) + ) + return False + return True if __name__ == '__main__': - main(sys.argv[1:], get_system_values()) + system_user, created = User.objects.get_or_create(username='system') + if created: + logger.debug('Created reserved system user.') + main(sys.argv[1:], get_system_values(), system_user) diff --git a/tdrs-backend/tdpservice/scheduling/tasks.py b/tdrs-backend/tdpservice/scheduling/tasks.py index c14faebf2..24690d8c8 100644 --- a/tdrs-backend/tdpservice/scheduling/tasks.py +++ b/tdrs-backend/tdpservice/scheduling/tasks.py @@ -1,16 +1,8 @@ -"""Shared celery tasks file for beat.""" +"""Shared celery database tasks file for beat.""" from __future__ import absolute_import -from tdpservice.users.models import User, AccountApprovalStatusChoices -from django.contrib.auth.models import Group -from django.conf import settings -from django.urls import reverse -from django.utils import timezone from celery import shared_task -from datetime import datetime, timedelta import logging -from tdpservice.email.helpers.account_access_requests import send_num_access_requests_email -from tdpservice.email.helpers.account_deactivation_warning import send_deactivation_warning_email from .db_backup import run_backup logger = 
logging.getLogger(__name__) @@ -20,59 +12,10 @@ def postgres_backup(*args): """Run nightly postgres backup.""" arg = ''.join(args) logger.debug("postgres_backup::run_backup() run with arg: " + arg) - run_backup(arg) - return True - -@shared_task -def check_for_accounts_needing_deactivation_warning(): - """Check for accounts that need deactivation warning emails.""" - deactivate_in_10_days = users_to_deactivate(10) - deactivate_in_3_days = users_to_deactivate(3) - deactivate_in_1_day = users_to_deactivate(1) - - if deactivate_in_10_days: - send_deactivation_warning_email(deactivate_in_10_days, 10) - if deactivate_in_3_days: - send_deactivation_warning_email(deactivate_in_3_days, 3) - if deactivate_in_1_day: - send_deactivation_warning_email(deactivate_in_1_day, 1) - -def users_to_deactivate(days): - """Return a list of users that have not logged in in the last {180 - days} days.""" - days = 180 - days - return User.objects.filter( - last_login__lte=datetime.now(tz=timezone.utc) - timedelta(days=days), - last_login__gte=datetime.now(tz=timezone.utc) - timedelta(days=days+1), - account_approval_status=AccountApprovalStatusChoices.APPROVED, - ) - -def get_ofa_admin_user_emails(): - """Return a list of OFA System Admin and OFA Admin users.""" - return User.objects.filter( - groups__in=Group.objects.filter(name__in=('OFA Admin', 'OFA System Admin')) - ).values_list('email', flat=True).distinct() - -def get_num_access_requests(): - """Return the number of users requesting access.""" - return User.objects.filter( - account_approval_status=AccountApprovalStatusChoices.ACCESS_REQUEST, - ).count() - -@shared_task -def email_admin_num_access_requests(): - """Send all OFA System Admins an email with how many users have requested access.""" - recipient_email = get_ofa_admin_user_emails() - text_message = '' - subject = 'Number of Active Access Requests' - url = f'{settings.FRONTEND_BASE_URL}{reverse("admin:users_user_changelist")}?o=-2' - email_context = { - 'date': 
datetime.today(), - 'num_requests': get_num_access_requests(), - 'admin_user_pg': url, - } - - send_num_access_requests_email(recipient_email, - text_message, - subject, - email_context, - ) + logger.info("Beginning database backup.") + result = run_backup(arg) + if result: + logger.info("Finished database backup.") + else: + logger.error("Failed to complete database backup.") + return result diff --git a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py index 5a3897a8e..369688d1a 100644 --- a/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py +++ b/tdrs-backend/tdpservice/scheduling/test/test_user_deactivation_warning.py @@ -4,7 +4,7 @@ import pytest import tdpservice from datetime import datetime, timedelta -from tdpservice.scheduling.tasks import check_for_accounts_needing_deactivation_warning +from tdpservice.email.tasks import check_for_accounts_needing_deactivation_warning from tdpservice.users.models import AccountApprovalStatusChoices import logging @@ -19,7 +19,7 @@ def test_deactivation_email_10_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -38,7 +38,7 @@ def test_deactivation_email_3_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -57,7 +57,7 @@ def test_deactivation_email_1_days(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -77,7 +77,7 @@ def test_no_users_to_warn(user, mocker): return_value=None ) mocker.patch( - 'tdpservice.scheduling.tasks.users_to_deactivate', + 'tdpservice.email.tasks.users_to_deactivate', return_value=[user] ) @@ -94,5
+94,5 @@ def test_users_to_deactivate(user): user.first_name = 'UniqueName' user.account_approval_status = AccountApprovalStatusChoices.APPROVED user.save() - users = tdpservice.scheduling.tasks.users_to_deactivate(10) + users = tdpservice.email.tasks.users_to_deactivate(10) assert users[0].first_name == user.first_name diff --git a/tdrs-backend/tdpservice/settings/common.py b/tdrs-backend/tdpservice/settings/common.py index 7d9126716..6dc076e22 100644 --- a/tdrs-backend/tdpservice/settings/common.py +++ b/tdrs-backend/tdpservice/settings/common.py @@ -452,7 +452,7 @@ class Common(Configuration): CELERY_TIMEZONE = 'UTC' CELERY_BEAT_SCHEDULE = { - 'name': { + 'Database Backup': { 'task': 'tdpservice.scheduling.tasks.postgres_backup', 'schedule': crontab(minute='0', hour='4'), # Runs at midnight EST 'args': "-b", @@ -460,8 +460,8 @@ class Common(Configuration): 'expires': 15.0, }, }, - 'name': { - 'task': 'tdpservice.scheduling.tasks.check_for_accounts_needing_deactivation_warning', + 'Account Deactivation Warning': { + 'task': 'tdpservice.email.tasks.check_for_accounts_needing_deactivation_warning', 'schedule': crontab(day_of_week='*', hour='13', minute='0'), # Every day at 1pm UTC (9am EST) 'options': { @@ -469,7 +469,7 @@ class Common(Configuration): }, }, 'Email Admin Number of Access Requests' : { - 'task': 'tdpservice.scheduling.tasks.email_admin_num_access_requests', + 'task': 'tdpservice.email.tasks.email_admin_num_access_requests', 'schedule': crontab(minute='0', hour='1', day_of_week='*', day_of_month='*', month_of_year='*'), # Every day at 1am UTC (9pm EST) } } diff --git a/terraform/dev/main.tf b/terraform/dev/main.tf index da1df5b10..e65d8e8c7 100644 --- a/terraform/dev/main.tf +++ b/terraform/dev/main.tf @@ -52,6 +52,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-dev" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["micro-psql"] + json_params = "{\"version\": \"12\"}" 
recursive_delete = true } diff --git a/terraform/production/main.tf b/terraform/production/main.tf index 6948ecd72..9fb0e16ca 100644 --- a/terraform/production/main.tf +++ b/terraform/production/main.tf @@ -51,6 +51,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-prod" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["medium-psql"] + json_params = "{\"version\": \"12\"}" recursive_delete = true } diff --git a/terraform/staging/main.tf b/terraform/staging/main.tf index 7b6d45a1f..00186bb34 100644 --- a/terraform/staging/main.tf +++ b/terraform/staging/main.tf @@ -51,6 +51,7 @@ resource "cloudfoundry_service_instance" "database" { name = "tdp-db-staging" space = data.cloudfoundry_space.space.id service_plan = data.cloudfoundry_service.rds.service_plans["micro-psql"] + json_params = "{\"version\": \"12\"}" recursive_delete = true }