Skip to content

Commit

Permalink
2118 au 05 audit process migrate logs to s3 buckets (#2248)
Browse files Browse the repository at this point in the history
* Create cloudwatchToS3lambda to export CloudWatch logs to an S3 bucket.

* Add aws-sdk dependencies to package / package-lock.

* Add cloudwatchToS3 lambda to functions.yml.

* Schedule cloudwatchToS3 to run every 4 hours using serverless.

* Create S3 bucket for CloudWatch Logs.

* Add CLOUDWATCH_BUCKET_NAME to env.yml.
  • Loading branch information
Matthew-Grayson authored Sep 28, 2023
1 parent efaea08 commit 24f6f9b
Show file tree
Hide file tree
Showing 9 changed files with 1,327 additions and 247 deletions.
2 changes: 2 additions & 0 deletions backend/env.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ staging:
EXPORT_BUCKET_NAME: cisa-crossfeed-staging-exports
PE_API_URL: ${ssm:/crossfeed/staging/PE_API_URL}
REPORTS_BUCKET_NAME: cisa-crossfeed-staging-reports
CLOUDWATCH_BUCKET_NAME: cisa-crossfeed-staging-cloudwatch

prod:
DB_DIALECT: 'postgres'
Expand Down Expand Up @@ -76,6 +77,7 @@ prod:
EXPORT_BUCKET_NAME: cisa-crossfeed-prod-exports
PE_API_URL: ${ssm:/crossfeed/staging/PE_API_URL}
REPORTS_BUCKET_NAME: cisa-crossfeed-prod-reports
CLOUDWATCH_BUCKET_NAME: cisa-crossfeed-prod-cloudwatch

dev-vpc:
securityGroupIds:
Expand Down
1,396 changes: 1,150 additions & 246 deletions backend/package-lock.json

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
},
"engineStrict": true,
"dependencies": {
"@aws-sdk/client-cloudwatch-logs": "^3.417.0",
"@aws-sdk/client-ssm": "^3.414.0",
"@elastic/elasticsearch": "~7.10.0",
"@thefaultvault/tfv-cpe-parser": "^1.3.0",
"aws-sdk": "^2.1352.0",
Expand Down Expand Up @@ -105,4 +107,4 @@
},
"author": "",
"license": "ISC"
}
}
130 changes: 130 additions & 0 deletions backend/src/tasks/cloudwatchToS3.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
import {
CloudWatchLogsClient,
DescribeLogGroupsCommand,
DescribeLogGroupsRequest,
LogGroup,
ListTagsForResourceCommand,
CreateExportTaskCommand
} from '@aws-sdk/client-cloudwatch-logs';
import {
SSMClient,
GetParameterCommand,
PutParameterCommand
} from '@aws-sdk/client-ssm';

// Shared AWS SDK v3 clients; region/credentials come from the Lambda runtime env.
const logs = new CloudWatchLogsClient({});
const ssm = new SSMClient({});
// Region/account used to build log-group ARNs for ListTagsForResource.
// NOTE(review): hard-coded account fallback — presumably the deployment account; confirm.
const region = process.env.AWS_REGION || 'us-east-1';
const accountId = process.env.AWS_ACCOUNT_ID || '957221700844';
// Promise-based sleep used to pace AWS API calls and export-task creation.
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

/** Returns the thrown value's Error name, or '' when it is not an Error. */
const errorName = (error: unknown): string =>
  error instanceof Error ? error.name : '';

/** Returns a printable message for an unknown thrown value. */
const errorMessage = (error: unknown): string =>
  error instanceof Error && error.message
    ? error.message
    : JSON.stringify(error);

/**
 * Exports CloudWatch log groups tagged `ExportToS3=true` to the S3 bucket
 * named by CLOUDWATCH_BUCKET_NAME.
 *
 * For each opted-in log group, the epoch-ms timestamp of the last export is
 * tracked in an SSM parameter (`/log-exporter-last-export/<group>`); a group
 * is re-exported only after 24 hours have elapsed. CloudWatch Logs allows a
 * single running export task per account, so on LimitExceededException the
 * function exits and relies on its schedule to resume later.
 *
 * @returns Promise<void>; all outcomes are reported via console logging.
 */
export const handler = async () => {
  const bucket = process.env.CLOUDWATCH_BUCKET_NAME;
  if (!bucket) {
    console.error('Error: CLOUDWATCH_BUCKET_NAME not defined');
    return;
  }

  console.log('--> CLOUDWATCH_BUCKET_NAME=' + bucket);

  // Page through every log group in the account/region.
  const describeArgs: DescribeLogGroupsRequest = {};
  let logGroups: LogGroup[] = [];
  while (true) {
    const response = await logs.send(
      new DescribeLogGroupsCommand(describeArgs)
    );
    // Guard instead of the original non-null assertion on response.logGroups.
    logGroups = logGroups.concat(response.logGroups ?? []);
    if (!response.nextToken) {
      break;
    }
    describeArgs.nextToken = response.nextToken;
  }

  // Select the log groups that opted in via the ExportToS3=true tag.
  const logGroupsToExport: string[] = [];
  for (const logGroup of logGroups) {
    if (!logGroup.logGroupName) {
      continue; // skip rather than assert non-null
    }
    const response = await logs.send(
      new ListTagsForResourceCommand({
        resourceArn: `arn:aws:logs:${region}:${accountId}:log-group:${logGroup.logGroupName}`
      })
    );
    if ((response.tags ?? {}).ExportToS3 === 'true') {
      logGroupsToExport.push(logGroup.logGroupName);
    }
    // Pace the per-group API calls; also spaces out the export tasks created
    // below (AWS allows only one running export task at a time).
    await delay(10 * 1000);
  }

  for (const logGroupName of logGroupsToExport) {
    // SSM parameter holding the epoch-ms timestamp of this group's last
    // export; collapse the double slash produced by leading-'/' group names.
    const ssmParameterName = (
      '/log-exporter-last-export/' + logGroupName
    ).replace('//', '/');
    let ssmValue = '0'; // default: never exported

    try {
      const ssmResponse = await ssm.send(
        new GetParameterCommand({ Name: ssmParameterName })
      );
      ssmValue = ssmResponse.Parameter?.Value || '0';
    } catch (error) {
      // ParameterNotFound simply means this group was never exported.
      if (errorName(error) !== 'ParameterNotFound') {
        console.error('Error fetching SSM parameter: ' + errorMessage(error));
      }
    }

    // Date.now() already returns an integer; the original Math.round was a no-op.
    const exportToTime = Date.now();

    console.log('--> Exporting ' + logGroupName + ' to ' + bucket);

    if (exportToTime - parseInt(ssmValue) < 24 * 60 * 60 * 1000) {
      // Fewer than 24 hours since this group's last export.
      console.log(' Skipped until 24hrs from last export is completed');
      continue;
    }

    try {
      const response = await logs.send(
        new CreateExportTaskCommand({
          logGroupName: logGroupName,
          from: parseInt(ssmValue),
          to: exportToTime,
          destination: bucket,
          // S3 prefix must not start or end with '/'.
          destinationPrefix: logGroupName
            .replace(/^\//, '')
            .replace(/\/$/, '')
        })
      );

      console.log(' Task created: ' + response.taskId);
      await delay(5000); // reuse the module-level helper instead of an inline Promise
    } catch (error) {
      if (errorName(error) === 'LimitExceededException') {
        // Only one export task may run at a time; the 4-hour schedule retries.
        console.log(
          ' Need to wait until all tasks are finished (LimitExceededException). Continuing later...'
        );
        return;
      }
      console.error(
        ' Error exporting ' + logGroupName + ': ' + errorMessage(error)
      );
      continue;
    }

    // Record the successful export time only after the task was created.
    await ssm.send(
      new PutParameterCommand({
        Name: ssmParameterName,
        Type: 'String',
        Value: exportToTime.toString(),
        Overwrite: true
      })
    );
  }
};
8 changes: 8 additions & 0 deletions backend/src/tasks/functions.yml
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
# Periodically exports tagged CloudWatch log groups to S3 (see cloudwatchToS3.ts).
cloudwatchToS3:
  handler: src/tasks/cloudwatchToS3.handler
  timeout: 900 # maximum Lambda timeout; the task sleeps between API calls
  events:
    - schedule: rate(4 hours)
  # Only one export task may run at a time in CloudWatch Logs, so never
  # run concurrent copies of this function.
  reservedConcurrency: 1
  memorySize: 4096

scheduler:
handler: src/tasks/scheduler.handler
timeout: 900
Expand Down
18 changes: 18 additions & 0 deletions infrastructure/cloudwatch.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Destination bucket for CloudWatch log exports (see the cloudwatchToS3 task).
resource "aws_s3_bucket" "cloudwatch_bucket" {
  bucket = var.cloudwatch_bucket_name
  tags = {
    project = var.project
    stage   = var.stage
  }
}

# TODO: update retention_in_days based on developing requirements
# Log group encrypted with the project KMS key; retained for one year.
resource "aws_cloudwatch_log_group" "cloudwatch_bucket" {
  name              = var.cloudwatch_log_group_name
  retention_in_days = 365
  kms_key_id        = aws_kms_key.key.arn
  tags = {
    project = var.project
    stage   = var.stage
  }
}
2 changes: 2 additions & 0 deletions infrastructure/prod.tfvars
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ logging_bucket_name = "cisa-crossfeed-prod-logging"
cloudtrail_bucket_name = "cisa-crossfeed-prod-cloudtrail"
cloudtrail_role_name = "cisa-crossfeed-prod-cloudtrail"
cloudtrail_log_group_name = "cisa-crossfeed-prod-cloudtrail"
cloudwatch_bucket_name = "cisa-crossfeed-prod-cloudwatch"
cloudwatch_log_group_name = "crossfeed-prod-cloudwatch-bucket"
export_bucket_name = "cisa-crossfeed-prod-exports"
reports_bucket_name = "cisa-crossfeed-prod-reports"
pe_db_backups_bucket_name = "cisa-crossfeed-prod-pe-db-backups"
Expand Down
2 changes: 2 additions & 0 deletions infrastructure/stage.tfvars
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ logging_bucket_name = "cisa-crossfeed-staging-logging"
cloudtrail_bucket_name = "cisa-crossfeed-staging-cloudtrail"
cloudtrail_role_name = "cisa-crossfeed-staging-cloudtrail"
cloudtrail_log_group_name = "cisa-crossfeed-staging-cloudtrail"
cloudwatch_bucket_name = "cisa-crossfeed-staging-cloudwatch"
cloudwatch_log_group_name = "crossfeed-staging-cloudwatch-bucket"
export_bucket_name = "cisa-crossfeed-staging-exports"
reports_bucket_name = "cisa-crossfeed-staging-reports"
pe_db_backups_bucket_name = "cisa-crossfeed-staging-pe-db-backups"
Expand Down
12 changes: 12 additions & 0 deletions infrastructure/vars.tf
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,18 @@ variable "cloudtrail_log_group_name" {
default = "crossfeed-staging-cloudtrail-logs"
}

variable "cloudwatch_bucket_name" {
description = "cloudwatch_bucket_name"
type = string
default = "cisa-crossfeed-staging-cloudwatch"
}

variable "cloudwatch_log_group_name" {
description = "cloudwatch_log_group_name"
type = string
default = "crossfeed-staging-cloudwatch-bucket"
}

variable "export_bucket_name" {
description = "export_bucket_name"
type = string
Expand Down

0 comments on commit 24f6f9b

Please sign in to comment.