Skip to content

Commit

Permalink
RDS backups to S3
Browse files Browse the repository at this point in the history
* Conditionally create a scheduled Fargate task to back up RDS databases
  to S3
  • Loading branch information
Stretch96 committed Jul 31, 2024
1 parent a454611 commit f6bb1c0
Show file tree
Hide file tree
Showing 19 changed files with 812 additions and 4 deletions.
45 changes: 45 additions & 0 deletions README.md

Large diffs are not rendered by default.

18 changes: 18 additions & 0 deletions buildspecs/dalmatian-sql-backup.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
version: 0.2

# Buildspec for the dalmatian-sql-backup image build: log in to ECR,
# build the image, tag it and push it to the repository given by
# $REPOSITORY_URI (both env vars are injected by the CodeBuild project).
phases:
  pre_build:
    commands:
      - echo Build started on `date`
      - echo Logging in to Amazon ECR...
      - aws ecr get-login-password --region $AWS_DEFAULT_REGION | docker login --username AWS --password-stdin $AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com
      - echo Building dalmatian-sql-backup docker image ...
      - docker build -t dalmatian-sql-backup:latest .
  build:
    commands:
      - echo Adding ECR repo tag...
      - docker tag dalmatian-sql-backup:latest $REPOSITORY_URI:latest
  post_build:
    commands:
      - echo Pushing the Docker image...
      - docker push $REPOSITORY_URI:latest
4 changes: 2 additions & 2 deletions container-definitions/app.json.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,10 @@
],
%{ endif }
%{ if environment != "[]" }
"environment": "${environment},
"environment": ${environment},
%{ endif }
%{ if secrets != "[]" }
"secrets": "${secrets},
"secrets": ${secrets},
%{ endif }
%{ if environment_file_s3 != "" }
"environmentFiles": [
Expand Down
23 changes: 23 additions & 0 deletions ecs-entrypoints/rds-s3-backups-mysql.txt.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
DATE_STRING=$$(date +%Y%m%d%H%M) \
&& mkdir -p /tmp/sqlbackups/${rds_identifier} \
&& mysql -N \
-u $$DB_USERNAME \
-p$$DB_PASSWORD \
-h $$DB_HOST \
-e 'show databases' \
| grep -Ev 'Databases|information_schema|performance_schema|sys' \
| while read DB_NAME; \
do \
mysqldump \
-u $$DB_USERNAME \
-p$$DB_PASSWORD \
-h $$DB_HOST \
--set-gtid-purged=OFF \
--column-statistics=0 \
--single-transaction \"$$DB_NAME\" > /tmp/sqlbackups/${rds_identifier}/$$DATE_STRING-$$DB_NAME.sql; \
done \
&& cd /tmp/sqlbackups/${rds_identifier} \
&& aws s3 sync . s3://${s3_bucket_name} \
--storage-class STANDARD_IA \
&& rm *.sql \
&& echo 'SQL Backup Success!'
22 changes: 22 additions & 0 deletions ecs-entrypoints/rds-s3-backups-postgres.txt.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
DATE_STRING=$(date +%Y%m%d%H%M) \
&& mkdir -p /tmp/sqlbackups/${rds_identifier} \
&& PGPASSWORD=\"$$DB_PASSWORD\" \
psql \
-U $$DB_USERNAME \
-h $$DB_HOST \
-t \
-c 'SELECT datname FROM pg_database WHERE NOT datistemplate' \
| while read DB_NAME; \
do \
if [[ -n \"$$DB_NAME\" && \"$$DB_NAME\" != \"rdsadmin\" ]]; \
then \
pg_dump \
--clean \
--if-exists postgres://$$DB_USERNAME:$$DB_PASSWORD@$$DB_HOST:5432/$$DB_NAME > /tmp/sqlbackups/${rds_identifier}/$$DATE_STRING-$$DB_NAME.sql; \
fi; \
done \
&& cd /tmp/sqlbackups/${rds_identifier} \
&& aws s3 sync . s3://${s3_bucket_name} \
--storage-class STANDARD_IA \
&& rm *.sql \
&& echo 'SQL Backup Success!'
12 changes: 12 additions & 0 deletions kms-infrastructure.tf
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,18 @@ resource "aws_kms_key" "infrastructure" {
for k, v in local.infrastructure_ecs_cluster_services : aws_iam_role.infrastructure_ecs_cluster_service_task_execution[k].arn if v["enable_execute_command"] == true && local.infrastructure_ecs_cluster_enable_execute_command_logging
])
}
)}${length(local.infrastructure_rds) > 0 && local.infrastructure_kms_encryption && local.enable_infrastructure_rds_backup_to_s3 ? "," : ""}
${templatefile("${path.root}/policies/kms-key-policy-statements/role-allow-encrypt.json.tpl",
{
role_arns = jsonencode([
for k, v in local.infrastructure_rds : aws_iam_role.infrastructure_rds_s3_backups_task[k].arn if local.enable_infrastructure_rds_backup_to_s3
])
}
)}${length(local.infrastructure_rds) > 0 && local.infrastructure_kms_encryption && local.enable_infrastructure_rds_backup_to_s3 ? "," : ""}
${templatefile("${path.root}/policies/kms-key-policy-statements/cloudwatch-logs-allow.json.tpl",
{
log_group_arn = length(local.infrastructure_rds) > 0 && local.infrastructure_kms_encryption && local.enable_infrastructure_rds_backup_to_s3 ? "arn:aws:logs:${local.aws_region}:${local.aws_account_id}:log-group:${local.resource_prefix}-infrastructure-rds-s3-backups-*" : ""
}
)}${contains([for k, v in local.custom_s3_buckets : (v["cloudfront_dedicated_distribution"] == true || v["cloudfront_infrastructure_ecs_cluster_service"] != null) && v["create_dedicated_kms_key"] == false ? true : false], true) && local.infrastructure_kms_encryption ? "," : ""}
${templatefile("${path.root}/policies/kms-key-policy-statements/cloudfront-distribution-allow.json.tpl",
{
Expand Down
70 changes: 70 additions & 0 deletions local-exec-scripts/trigger-codedeploy-project.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#!/bin/bash

# Trigger a CodeBuild project by name, poll until it reaches the COMPLETED
# phase, then exit non-zero if any phase FAILED.

# exit on failures
set -e
set -o pipefail

# Print usage and exit with failure.
usage() {
  echo "Usage: $(basename "$0") [OPTIONS]" 1>&2
  echo " -h - help"
  echo " -n - CodeBuild project name"
  exit 1
}

project_name=""
while getopts "n:h" flag; do
  case "$flag" in
    n)
      project_name="$OPTARG"
      ;;
    h | *)
      usage
      ;;
  esac
done

# Project name is mandatory
[ -n "$project_name" ] || usage

# Start the build and capture its id for polling
build_id="$(
  aws codebuild start-build \
    --project-name "$project_name" \
    | jq -r '.build.id'
)"

echo "Triggered $project_name CodeBuild project ($build_id) ..."

# Poll every 10s until the build's phase list contains COMPLETED
build_progress=""
completed=""
until [ -n "$completed" ]; do
  sleep 10
  echo "Checking progress of CodeBuild $build_id ..."
  build_progress="$(
    aws codebuild batch-get-builds \
      --ids "$build_id"
  )"
  completed="$(
    echo "$build_progress" \
      | jq -r \
        '.builds[0].phases[] | select(.phaseType == "COMPLETED")'
  )"
done
echo "CodeBuild $build_id Completed, checking for failures ..."

# Any phase reporting FAILED means the build as a whole failed
failures="$(
  echo "$build_progress" \
    | jq -r \
      '.builds[0].phases[] | select(.phaseStatus == "FAILED")'
)"

if [ -n "$failures" ]; then
  echo "$failures"
  exit 1
fi

echo "CodeBuild $build_id completed without failures"
17 changes: 15 additions & 2 deletions locals.tf
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,14 @@ locals {
length(local.custom_s3_buckets) != 0 ||
local.enable_cloudformatian_s3_template_store ||
local.enable_infrastructure_vpc_transfer_s3_bucket ||
local.infrastructure_ecs_cluster_enable_execute_command_logging
local.infrastructure_ecs_cluster_enable_execute_command_logging ||
local.enable_infrastructure_rds_backup_to_s3
)
logs_bucket_s3_source_arns = concat(
length(local.infrastructure_ecs_cluster_services) != 0 ? [aws_s3_bucket.infrastructure_ecs_cluster_service_build_pipeline_artifact_store[0].arn] : [],
local.enable_infrastructure_vpc_transfer_s3_bucket ? [aws_s3_bucket.infrastructure_vpc_transfer[0].arn] : [],
[for k, v in local.custom_s3_buckets : aws_s3_bucket.custom[k].arn]
[for k, v in local.custom_s3_buckets : aws_s3_bucket.custom[k].arn],
local.enable_infrastructure_rds_backup_to_s3 ? [aws_s3_bucket.infrastructure_rds_s3_backups[0].arn] : [],
)
logs_bucket_logs_source_arns = concat(
local.infrastructure_vpc_flow_logs_s3_with_athena ? ["arn:aws:logs:${local.aws_region}:${local.aws_account_id}:*"] : []
Expand Down Expand Up @@ -232,6 +234,17 @@ locals {
"mysql" = 3306
"postgres" = 5432
}
rds_s3_backups_container_entrypoint_file = {
"mysql" = "${path.root}/ecs-entrypoints/rds-s3-backups-mysql.txt.tpl"
"postgres" = "${path.root}/ecs-entrypoints/rds-s3-backups-postgres.txt.tpl"
}
enable_infrastructure_rds_backup_to_s3 = var.enable_infrastructure_rds_backup_to_s3
infrastructure_rds_backup_to_s3_cron_expression = var.infrastructure_rds_backup_to_s3_cron_expression
infrastructure_rds_backup_to_s3_retention = var.infrastructure_rds_backup_to_s3_retention
enable_infrastructure_rds_tooling_ecs_cluster = anytrue([
local.enable_infrastructure_rds_backup_to_s3,
])
infrastructure_rds_tooling_ecs_cluster_name = "${local.resource_prefix}-infrastructure-rds-tooling"

infrastructure_elasticache_defaults = var.infrastructure_elasticache_defaults
infrastructure_elasticache_keys = length(var.infrastructure_elasticache) > 0 ? keys(values(var.infrastructure_elasticache)[0]) : []
Expand Down
16 changes: 16 additions & 0 deletions policies/codebuild-allow-builds.json.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"codebuild:StartBuild",
"codebuild:StopBuild",
"codebuild:BatchGet*",
"codebuild:Get*",
"codebuild:List*"
],
"Effect": "Allow",
"Resource": "*"
}
]
}
12 changes: 12 additions & 0 deletions policies/secrets-manager-get-secret-value.json.tpl
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"secretsmanager:GetSecretValue"
],
"Resource": ${secret_name_arns}
}
]
}
9 changes: 9 additions & 0 deletions rds-infrastructure-cloudwatch-logs.tf
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,12 @@ resource "aws_cloudwatch_log_group" "infrastructure_rds_exports" {
retention_in_days = 30
kms_key_id = local.infrastructure_kms_encryption ? aws_kms_key.infrastructure[0].id : null
}

# One log group per RDS instance for the scheduled RDS -> S3 backup task,
# created only when the backup-to-S3 feature is enabled.
resource "aws_cloudwatch_log_group" "infrastructure_rds_s3_backups" {
  for_each = local.enable_infrastructure_rds_backup_to_s3 ? local.infrastructure_rds : {}

  name              = "${local.resource_prefix}-infrastructure-rds-s3-backups-${each.key}"
  retention_in_days = local.infrastructure_rds_backup_to_s3_retention
  # NOTE: uses the key ARN here (the sibling log group above passes .id) —
  # CloudWatch Logs expects the ARN, so this form is the safer one
  kms_key_id        = local.infrastructure_kms_encryption ? aws_kms_key.infrastructure[0].arn : null
  # Keep the log group (and its history) if the feature is later disabled
  skip_destroy      = true
}
17 changes: 17 additions & 0 deletions rds-infrastructure-s3-backups-ecr.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# ECR repository for the dalmatian-sql-backup image used by the backup task.
resource "aws_ecr_repository" "infrastructure_rds_s3_backups" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name = "${local.resource_prefix}-rds-s3-backups"

  # The build re-pushes the "latest" tag, so the repo must allow tag mutation
  #tfsec:ignore:aws-ecr-enforce-immutable-repository
  image_tag_mutability = "MUTABLE"

  # Use the infrastructure KMS key when KMS encryption is enabled,
  # otherwise fall back to SSE-S3-style AES256 managed encryption
  encryption_configuration {
    encryption_type = local.infrastructure_kms_encryption ? "KMS" : "AES256"
    kms_key         = local.infrastructure_kms_encryption ? aws_kms_key.infrastructure[0].arn : null
  }

  image_scanning_configuration {
    scan_on_push = true
  }
}
128 changes: 128 additions & 0 deletions rds-infrastructure-s3-backups-image-codebuild.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
# Role for the CodeBuild project that builds the backup image.
# events.amazonaws.com is included so the scheduled EventBridge rule can
# assume the same role to start builds.
resource "aws_iam_role" "infrastructure_rds_s3_backups_image_codebuild" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  # Short sha512 suffix keeps the generated name unique and within IAM limits
  name        = "${local.resource_prefix}-${substr(sha512("rds-s3-backups-image-codebuild"), 0, 6)}"
  description = "${local.resource_prefix}-rds-s3-backups-image-codebuild"
  assume_role_policy = templatefile(
    "${path.root}/policies/assume-roles/service-principle-standard.json.tpl",
    { services = jsonencode(["codebuild.amazonaws.com", "events.amazonaws.com"]) }
  )
}

# Allows the image-build CodeBuild project to write its build logs.
resource "aws_iam_policy" "infrastructure_rds_s3_backups_image_codebuild_cloudwatch_rw" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name        = "${local.resource_prefix}-${substr(sha512("rds-s3-backups-image-codebuild-cloudwatch-rw"), 0, 6)}"
  description = "${local.resource_prefix}-rds-s3-backups-image-codebuild-cloudwatch-rw"
  policy      = templatefile("${path.root}/policies/cloudwatch-logs-rw.json.tpl", {})
}

# Attach the CloudWatch Logs read/write policy to the image-build role.
resource "aws_iam_role_policy_attachment" "infrastructure_rds_s3_backups_image_codebuild_cloudwatch_rw" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  role       = aws_iam_role.infrastructure_rds_s3_backups_image_codebuild[0].name
  policy_arn = aws_iam_policy.infrastructure_rds_s3_backups_image_codebuild_cloudwatch_rw[0].arn
}

# Allows starting, stopping and inspecting CodeBuild builds — needed because
# the EventBridge schedule assumes this role to trigger the image build.
resource "aws_iam_policy" "infrastructure_rds_s3_backups_image_codebuild_allow_builds" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name        = "${local.resource_prefix}-${substr(sha512("rds-s3-backups-image-codebuild-allow-builds"), 0, 6)}"
  description = "${local.resource_prefix}-rds-s3-backups-image-codebuild-allow-builds"
  policy      = templatefile("${path.root}/policies/codebuild-allow-builds.json.tpl", {})
}

# Attach the allow-builds policy to the image-build role.
resource "aws_iam_role_policy_attachment" "infrastructure_rds_s3_backups_image_codebuild_allow_builds" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  role       = aws_iam_role.infrastructure_rds_s3_backups_image_codebuild[0].name
  policy_arn = aws_iam_policy.infrastructure_rds_s3_backups_image_codebuild_allow_builds[0].arn
}

# Allows the image-build CodeBuild project to push to the backup ECR repo.
resource "aws_iam_policy" "infrastructure_rds_s3_backups_image_codebuild_ecr_push" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name        = "${local.resource_prefix}-${substr(sha512("rds-s3-backups-image-codebuild-ecr-push"), 0, 6)}"
  # was "...-ecs-service-codepipeline-codebuild-${each.key}-ecr-push", copy-
  # pasted from the ECS service module: each.key is invalid in a resource that
  # uses count and would fail `terraform validate`
  description = "${local.resource_prefix}-rds-s3-backups-image-codebuild-ecr-push"
  policy = templatefile(
    "${path.root}/policies/ecr-push.json.tpl",
    # the template variable expects the repository ARN, not its URL
    { ecr_repository_arn = aws_ecr_repository.infrastructure_rds_s3_backups[0].arn }
  )
}

# Attach the ECR push policy to the image-build role.
# NOTE(review): the resource label "infrastructure_ecs_cluster_service_codebuild_ecr_push"
# looks copy-pasted from the ECS service module — it attaches RDS-backup
# resources. Renaming it would move the resource address in state, so it is
# documented rather than changed here.
resource "aws_iam_role_policy_attachment" "infrastructure_ecs_cluster_service_codebuild_ecr_push" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  role       = aws_iam_role.infrastructure_rds_s3_backups_image_codebuild[0].name
  policy_arn = aws_iam_policy.infrastructure_rds_s3_backups_image_codebuild_ecr_push[0].arn
}

# CodeBuild project that builds the dalmatian-sql-backup Docker image from
# the public GitHub repo and pushes it to the dedicated ECR repository.
resource "aws_codebuild_project" "infrastructure_rds_s3_backups_image_build" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name          = "${local.resource_prefix}-rds-s3-backups-image-build"
  description   = "${local.resource_prefix} RDS S3 Backups Image Build"
  build_timeout = "20"
  service_role  = aws_iam_role.infrastructure_rds_s3_backups_image_codebuild[0].arn

  # The image is pushed straight to ECR; the build produces no artifacts
  artifacts {
    type = "NO_ARTIFACTS"
  }

  environment {
    compute_type = "BUILD_GENERAL1_SMALL"
    image        = "aws/codebuild/standard:7.0"
    type         = "LINUX_CONTAINER"
    # Required for Docker-in-Docker: the buildspec runs docker build/push
    privileged_mode = true

    # Consumed by buildspecs/dalmatian-sql-backup.yml for the ECR login
    environment_variable {
      name  = "AWS_ACCOUNT_ID"
      value = local.aws_account_id
    }

    # Target repository for docker tag/push in the buildspec
    environment_variable {
      name  = "REPOSITORY_URI"
      value = aws_ecr_repository.infrastructure_rds_s3_backups[0].repository_url
    }
  }

  source {
    type            = "GITHUB"
    location        = "https://github.com/dxw/dalmatian-sql-backup"
    git_clone_depth = 1
    # templatefile with no vars — used purely to inline the buildspec contents
    buildspec       = templatefile("${path.root}/buildspecs/dalmatian-sql-backup.yml", {})
  }
}

# Re-run the image build (via local-exec) whenever the rendered buildspec
# content changes.
resource "terraform_data" "infrastructure_rds_s3_backups_image_build_trigger_codebuild" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  # Hash of the rendered buildspec — any edit to it replaces this resource
  # and re-fires the provisioner
  triggers_replace = [
    md5(templatefile("${path.root}/buildspecs/dalmatian-sql-backup.yml", {})),
  ]

  # Starts the CodeBuild project and waits for it to finish
  # (script name says "codedeploy" but it triggers CodeBuild)
  provisioner "local-exec" {
    interpreter = ["/bin/bash", "-c"]
    command     = <<EOF
${path.root}/local-exec-scripts/trigger-codedeploy-project.sh \
-n "${aws_codebuild_project.infrastructure_rds_s3_backups_image_build[0].name}"
EOF
  }
}

# Scheduled trigger for the image build.
# NOTE(review): this reuses infrastructure_rds_backup_to_s3_cron_expression —
# the backup schedule — so the image is rebuilt on every backup cycle;
# presumably intentional (fresh image ahead of each run), but confirm.
resource "aws_cloudwatch_event_rule" "infrastructure_rds_s3_backups_image_build_trigger_codebuild" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  name                = "${local.resource_prefix_hash}-rds-s3-backups-image-build-trigger-codebuild"
  description         = "${local.resource_prefix} RDS S3 Backups Image Build Trigger CodeBuild"
  schedule_expression = local.infrastructure_rds_backup_to_s3_cron_expression
}

# Point the scheduled rule at the CodeBuild project, assuming the build role
# (which trusts events.amazonaws.com) to start the build.
resource "aws_cloudwatch_event_target" "infrastructure_rds_s3_backups_image_build_trigger_codebuild" {
  count = local.enable_infrastructure_rds_backup_to_s3 ? 1 : 0

  target_id = "${local.resource_prefix_hash}-rds-s3-backups-image-build-trigger-codebuild"
  rule      = aws_cloudwatch_event_rule.infrastructure_rds_s3_backups_image_build_trigger_codebuild[0].name
  # NOTE(review): relies on aws_codebuild_project .id resolving to the project
  # ARN — .arn would state the intent more clearly; verify against the
  # provider version in use
  arn       = aws_codebuild_project.infrastructure_rds_s3_backups_image_build[0].id
  role_arn  = aws_iam_role.infrastructure_rds_s3_backups_image_codebuild[0].arn
}
Loading

0 comments on commit f6bb1c0

Please sign in to comment.