From 0a288d13308e5696c351f78b7a8c89d3ca2a8760 Mon Sep 17 00:00:00 2001 From: Joseph Ivie Date: Thu, 17 Oct 2024 08:54:13 -0600 Subject: [PATCH 1/4] All terms required fix --- demo/build.gradle.kts | 2 + demo/src/main/kotlin/main.kt | 17 +- demo/terraform/example/alarms.tf | 112 - demo/terraform/example/cloud.tf | 53 +- demo/terraform/example/files.tf | 4 - demo/terraform/example/general.tf | 2 + demo/terraform/example/lambda.tf | 21 +- demo/terraform/example/main.tf | 16 +- demo/terraform/example/metrics.tf | 6 +- demo/terraform/example/project.json | 1 + .../lightningserver/aws/terraform.kt | 2543 ----------------- .../lightningserver/aws/terraform/Models.kt | 46 + .../aws/terraform/TerraformHandler.kt | 42 + .../aws/terraform/TerraformInput.kt | 30 + .../aws/terraform/TerraformOutput.kt | 7 + .../aws/terraform/TerraformProvider.kt | 16 + .../TerraformRequirementBuildInfo.kt | 17 + .../aws/terraform/TerraformSection.kt | 42 + .../aws/terraform/Validation.kt | 6 + .../aws/terraform/commonAws.kt | 184 ++ .../lightningserver/aws/terraform/lambda.kt | 984 +++++++ .../lightningserver/aws/terraform/main.kt | 226 ++ .../aws/terraform/settingsHandlers.kt | 890 ++++++ .../aws/terraform/singleec2.kt | 217 ++ .../lightningserver/aws/terraformAws.kt | 7 + .../lightningserver/serialization/CSV2Test.kt | 20 + .../lightningdb/SearchTextConditionTest.kt | 10 + 27 files changed, 2783 insertions(+), 2738 deletions(-) delete mode 100644 demo/terraform/example/alarms.tf delete mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Models.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformHandler.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformInput.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformOutput.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformProvider.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformRequirementBuildInfo.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformSection.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Validation.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/commonAws.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/lambda.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/main.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/singleec2.kt create mode 100644 server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraformAws.kt diff --git a/demo/build.gradle.kts b/demo/build.gradle.kts index f5102f0ef..bda456a2f 100644 --- a/demo/build.gradle.kts +++ b/demo/build.gradle.kts @@ -61,6 +61,7 @@ tasks.create("rebuildTerraform", JavaExec::class.java) { mainClass.set("com.lightningkite.lightningserverdemo.MainKt") args("terraform") workingDir(project.rootDir) + inputs.files(*file("terraform").walkTopDown().filter { it.name == "project.json" 
}.toList().toTypedArray()) } fun env(name: String, profile: String) { @@ -94,6 +95,7 @@ fun env(name: String, profile: String) { } } env("example", "default") +env("lkec2", "lk") tasks.create("proguardTest", ProGuardTask::class) { this.injars(tasks.getByName("shadowJar")) diff --git a/demo/src/main/kotlin/main.kt b/demo/src/main/kotlin/main.kt index b1d4b215a..f25a72a8d 100644 --- a/demo/src/main/kotlin/main.kt +++ b/demo/src/main/kotlin/main.kt @@ -3,8 +3,6 @@ package com.lightningkite.lightningserverdemo import com.lightningkite.kotlinercli.cli -import com.lightningkite.lightningserver.aws.terraformMigrate -import com.lightningkite.lightningserver.aws.terraformAws import com.lightningkite.lightningserver.cache.* import com.lightningkite.lightningserver.files.ServerFile import com.lightningkite.lightningserver.ktor.runServer @@ -16,6 +14,7 @@ import java.io.File import kotlinx.datetime.Instant import java.util.* import com.lightningkite.UUID +import com.lightningkite.lightningserver.aws.terraform.createTerraform fun setup() { Server @@ -28,23 +27,13 @@ private fun serve() { fun terraform() { Server - terraformAws("com.lightningkite.lightningserverdemo.AwsHandler", "demo", File("demo/terraform2")) -} - -fun tfMigrate() { - Server - terraformMigrate("com.lightningkite.lightningserverdemo.AwsHandler", File("demo/terraform")) + createTerraform("com.lightningkite.lightningserverdemo.AwsHandler", "demo", File("demo/terraform")) } fun main(vararg args: String) { cli( arguments = args, setup = ::setup, - available = listOf(::serve, ::terraform, ::tfMigrate, ::dbTest), + available = listOf(::serve, ::terraform), ) } - -fun dbTest(): Unit = runBlocking { - Server - loadSettings(File("settings.json")) -} diff --git a/demo/terraform/example/alarms.tf b/demo/terraform/example/alarms.tf deleted file mode 100644 index aeafc90d8..000000000 --- a/demo/terraform/example/alarms.tf +++ /dev/null @@ -1,112 +0,0 @@ -# Generated via Lightning Server. This file will be overwritten or deleted when regenerating. 
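
# For reference, the alarm inputs declared below were supplied per environment,
# typically via terraform.tfvars; a minimal sketch (values illustrative, the
# contact address is a placeholder):
#   emergencyInvocationsPerMinuteThreshold = 100
#   emergencyComputePerMinuteThreshold     = 10000
#   panicInvocationsPerMinuteThreshold     = 500
#   panicComputePerMinuteThreshold         = 50000
#   emergencyContact                       = "ops@example.com"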
-########## -# Inputs -########## - -variable "emergencyInvocationsPerMinuteThreshold" { - type = number - default = 100 - nullable = false -} -variable "emergencyComputePerMinuteThreshold" { - type = number - default = 10000 - nullable = false -} -variable "panicInvocationsPerMinuteThreshold" { - type = number - default = 500 - nullable = false -} -variable "panicComputePerMinuteThreshold" { - type = number - default = 50000 - nullable = false -} -variable "emergencyContact" { - type = string - nullable = false -} - -########## -# Outputs -########## - - -########## -# Resources -########## - -resource "aws_sns_topic" "emergency" { - name = "demo-example_emergencies" -} -resource "aws_sns_topic_subscription" "emergency_primary" { - topic_arn = aws_sns_topic.emergency.arn - protocol = "email" - endpoint = var.emergencyContact -} -resource "aws_cloudwatch_metric_alarm" "emergency_invocations" { - alarm_name = "demo-example_emergency_invocations" - comparison_operator = "GreaterThanOrEqualToThreshold" - evaluation_periods = "1" - metric_name = "Invocations" - namespace = "AWS/Lambda" - period = "60" - statistic = "Sum" - threshold = "${var.emergencyInvocationsPerMinuteThreshold}" - alarm_description = "" - insufficient_data_actions = [] - dimensions = { - FunctionName = aws_lambda_function.main.function_name - } - alarm_actions = [aws_sns_topic.emergency.arn] -} -resource "aws_cloudwatch_metric_alarm" "emergency_compute" { - alarm_name = "demo-example_emergency_compute" - comparison_operator = "GreaterThanOrEqualToThreshold" - evaluation_periods = "1" - metric_name = "Duration" - namespace = "AWS/Lambda" - period = "60" - statistic = "Sum" - threshold = "${var.emergencyComputePerMinuteThreshold}" - alarm_description = "" - insufficient_data_actions = [] - dimensions = { - FunctionName = aws_lambda_function.main.function_name - } - alarm_actions = [aws_sns_topic.emergency.arn] -} -resource "aws_cloudwatch_metric_alarm" "panic_invocations" { - alarm_name = "demo-example_panic_invocations" - comparison_operator = "GreaterThanOrEqualToThreshold" - evaluation_periods = "1" - metric_name = "Invocations" - namespace = "AWS/Lambda" - period = "60" - statistic = "Sum" - threshold = "${var.panicInvocationsPerMinuteThreshold}" - alarm_description = "" - insufficient_data_actions = [] - dimensions = { - FunctionName = aws_lambda_function.main.function_name - } - alarm_actions = [aws_sns_topic.emergency.arn] -} -resource "aws_cloudwatch_metric_alarm" "panic_compute" { - alarm_name = "demo-example_panic_compute" - comparison_operator = "GreaterThanOrEqualToThreshold" - evaluation_periods = "1" - metric_name = "Duration" - namespace = "AWS/Lambda" - period = "60" - statistic = "Sum" - threshold = "${var.panicComputePerMinuteThreshold}" - alarm_description = "" - insufficient_data_actions = [] - dimensions = { - FunctionName = aws_lambda_function.main.function_name - } - alarm_actions = [aws_sns_topic.emergency.arn] -} - diff --git a/demo/terraform/example/cloud.tf b/demo/terraform/example/cloud.tf index e7bcc2c27..2483f452b 100644 --- a/demo/terraform/example/cloud.tf +++ b/demo/terraform/example/cloud.tf @@ -7,11 +7,13 @@ variable "deployment_location" { type = string default = "us-west-2" nullable = false + description = "The AWS region key to deploy all resources in." } variable "debug" { type = bool default = false nullable = false + description = "The GeneralSettings debug. Debug true will turn on various things during run time for easier development and bug tracking. 
Should be false for production environments." } variable "ip_prefix" { type = string @@ -21,10 +23,12 @@ variable "ip_prefix" { variable "domain_name_zone" { type = string nullable = false + description = "The AWS Hosted zone the domain will be placed under." } variable "domain_name" { type = string nullable = false + description = "The domain the server will be hosted at." } ########## @@ -36,55 +40,6 @@ variable "domain_name" { # Resources ########## - -resource "aws_api_gateway_account" "main" { - cloudwatch_role_arn = aws_iam_role.cloudwatch.arn -} - -resource "aws_iam_role" "cloudwatch" { - name = "demoexample" - - assume_role_policy = < = mapOf(), -) { -} - -internal val TerraformProjectInfo.privateSubnets get() = if (existingVpc) "[for s in data.aws_subnet.private : s.id]" else "module.vpc.private_subnets" -internal val TerraformProjectInfo.subnet_cidr_blocks get() = if (existingVpc) "[for s in data.aws_subnet.private : s.cidr_block]" else "concat(module.vpc.private_subnets_cidr_blocks, module.vpc.private_subnets_cidr_blocks, [])" -internal val TerraformProjectInfo.vpc_id get() = if (existingVpc) "data.aws_vpc.main.id" else "module.vpc.vpc_id" -internal val TerraformProjectInfo.vpc_cidr_block get() = if (existingVpc) "data.aws_vpc.main.cidr_block" else "module.vpc.vpc_cidr_block" -internal val TerraformProjectInfo.public_route_table_ids get() = if (existingVpc) "toset([data.aws_vpc.main.main_route_table_id])" else "module.vpc.public_route_table_ids" -internal val TerraformProjectInfo.natGatewayIp get() = if (existingVpc) "[for s in data.aws_nat_gateway.main : s.public_ip]" else "module.vpc.nat_public_ips" - -internal val TerraformProjectInfo.projectNameSafe: String - get() = projectName.filter { - it.isLetterOrDigit() || it in setOf( - '-', - '_' - ) - } -internal val TerraformProjectInfo.namePrefix: String get() = projectNameSafe -internal val TerraformProjectInfo.namePrefixLower: String get() = projectNameSafe.lowercase() -internal val TerraformProjectInfo.namePrefixUnderscores: String get() = projectNameSafe.replace("-", "_") -internal val TerraformProjectInfo.namePrefixSafe: String get() = projectNameSafe.filter { it.isLetterOrDigit() } -internal val TerraformProjectInfo.namePrefixPath: String - get() = projectNameSafe.lowercase().replace("-", "/").replace("_", "") -internal val TerraformProjectInfo.namePrefixPathSegment: String get() = projectNameSafe.lowercase().replace("_", "") - -internal data class TerraformRequirementBuildInfo( - val project: TerraformProjectInfo, - val name: String, - val appendable: Appendable, -) : Appendable by appendable { - val key: String get() = name -} - -internal val TerraformRequirementBuildInfo.namePrefix: String get() = project.namePrefix -internal val TerraformRequirementBuildInfo.namePrefixLower: String get() = project.namePrefixLower -internal val TerraformRequirementBuildInfo.namePrefixUnderscores: String get() = project.namePrefixUnderscores -internal val TerraformRequirementBuildInfo.namePrefixSafe: String get() = project.namePrefixSafe -internal val TerraformRequirementBuildInfo.namePrefixPath: String get() = project.namePrefixPath -internal val TerraformRequirementBuildInfo.namePrefixPathSegment: String get() = project.namePrefixPathSegment - -internal data class TerraformProvider( - val name: String, - val source: String, - val version: String, -) { - companion object { - val aws = TerraformProvider("aws", "hashicorp/aws", "~> 4.30") - val random = TerraformProvider("random", "hashicorp/random", "~> 3.1.0") - val archive = 
TerraformProvider("archive", "hashicorp/archive", "~> 2.2.0") - val mongodbatlas = TerraformProvider("mongodbatlas", "mongodb/mongodbatlas", "~> 1.4") - val local = TerraformProvider("local", "hashicorp/local", "~> 2.2") - val nullProvider = TerraformProvider("null", "hashicorp/null", "~> 3.2") - } -} - -internal data class TerraformSection( - val name: String, - val providers: List = listOf( - TerraformProvider.aws, - TerraformProvider.local, - TerraformProvider.random, - TerraformProvider.nullProvider, - TerraformProvider.archive - ), - val inputs: List = listOf(), - val emit: Appendable.() -> Unit = {}, - val toLightningServer: Map? = null, - val outputs: List = listOf(), -) { - companion object { - - fun default(setting: Settings.Requirement) = TerraformSection( - name = setting.name, - inputs = listOf( - TerraformInput( - name = setting.name, - type = "any", - default = setting.default.let { - Serialization.Internal.json.encodeToString( - setting.serializer, - it - ) - }, - nullable = setting.serializer.descriptor.isNullable, - description = setting.description - ), - ), - toLightningServer = mapOf(setting.name to "var.${setting.name}") - ) - } -} - -internal data class TerraformHandler( - val name: String, - val priority: Int = 0, - val makeSection: TerraformProjectInfo.(settingKey: String) -> TerraformSection, -) { - companion object { - val handlers = - HashMap, HashMap>() - - inline fun handler( - name: String = "Standard", - priority: Int = 0, - providers: List = listOf( - TerraformProvider.aws, - TerraformProvider.random, - TerraformProvider.archive - ), - noinline inputs: TerraformProjectInfo.(settingKey: String) -> List = { listOf() }, - noinline emit: TerraformRequirementBuildInfo.() -> Unit = { }, - noinline settingOutput: TerraformProjectInfo.(settingKey: String) -> String, - ) { - handlers.getOrPut(serializer()) { HashMap() }.put(name, TerraformHandler(name, priority) { it -> - TerraformSection( - name = it, - providers = providers, - inputs = inputs(this, it), - emit = { emit(TerraformRequirementBuildInfo(this@TerraformHandler, it, this)) }, - toLightningServer = mapOf(it to settingOutput(this, it)), - outputs = listOf() - ) - }) - } - } -} - -internal data class Validation( - val condition: String, - val errorMessage: String, -) - -internal data class TerraformInput( - val name: String, - val type: String, - val default: String?, - val nullable: Boolean = false, - val description: String? = null, - val validations: List = emptyList(), -) { - companion object { - fun stringList(name: String, default: List?, nullable: Boolean = false, description: String? = null) = - TerraformInput( - name, - "list(string)", - default?.joinToString(", ", "[", "]") { "\"$it\"" }, - nullable = nullable, - description = description, - ) - - fun string(name: String, default: String?, nullable: Boolean = false, description: String? = null) = - TerraformInput(name, "string", default?.let { "\"$it\"" }, nullable = nullable, description = description) - - fun boolean(name: String, default: Boolean?, nullable: Boolean = false, description: String? = null) = - TerraformInput(name, "bool", default?.toString(), nullable = nullable, description = description) - - fun number(name: String, default: Number?, nullable: Boolean = false, description: String? 
= null) = - TerraformInput(name, "number", default?.toString(), nullable = nullable, description = description) - } -} - -internal data class TerraformOutput(val name: String, val value: String) { - companion object { - - } -} - -internal fun handlers() { - TerraformHandler.handler( - inputs = { - listOf( - TerraformInput( - name = "cors", - type = "object({ allowedDomains = list(string), allowedHeaders = list(string) })", - default = "null", - nullable = true, - description = "Defines the cors rules for the server." - ), - TerraformInput.string( - "display_name", - projectName, - description = "The GeneralSettings projectName." - ) - ) - }, - settingOutput = { - """ - { - projectName = var.display_name - publicUrl = ${if (domain) "\"https://${'$'}{var.domain_name}\"" else "aws_apigatewayv2_stage.http.invoke_url"} - wsUrl = ${if (domain) "\"wss://ws.${'$'}{var.domain_name}?path=\"" else "\"\${aws_apigatewayv2_stage.ws.invoke_url}?path=\""} - debug = var.debug - cors = var.cors - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "S3", - inputs = { key -> - listOf( - TerraformInput.string("${key}_expiry", "P1D", nullable = true) - ) - }, - emit = { - appendLine( - """ - resource "aws_s3_bucket" "${key}" { - bucket_prefix = "${namePrefixPathSegment}-${key.lowercase()}" - force_destroy = var.debug - } - resource "aws_s3_bucket_cors_configuration" "${key}" { - bucket = aws_s3_bucket.${key}.bucket - - cors_rule { - allowed_headers = ["*"] - allowed_methods = ["PUT", "POST"] - allowed_origins = ["*"] - expose_headers = ["ETag"] - max_age_seconds = 3000 - } - - cors_rule { - allowed_headers = ["*"] - allowed_methods = ["GET", "HEAD"] - allowed_origins = ["*"] - } - } - resource "aws_s3_bucket_public_access_block" "$key" { - count = var.${key}_expiry == null ? 1 : 0 - bucket = aws_s3_bucket.$key.id - - block_public_acls = false - block_public_policy = false - ignore_public_acls = false - restrict_public_buckets = false - } - resource "aws_s3_bucket_policy" "$key" { - depends_on = [aws_s3_bucket_public_access_block.${key}] - count = var.${key}_expiry == null ? 
1 : 0
                bucket = aws_s3_bucket.$key.id
                policy = <
            """
            {
                storageUrl = "s3://${'$'}{aws_s3_bucket.${key}.id}.s3-${'$'}{aws_s3_bucket.${key}.region}.amazonaws.com"
                signedUrlExpiration = var.${key}_expiry
            }
            """.trimIndent()
        }
    )
    TerraformHandler.handler(
        name = "DocumentDB",
        inputs = { key ->
            listOf(TerraformInput.string("${key}_instance_class", "db.t4g.medium"))
        },
        emit = {
            if (!project.vpc) throw UnsupportedOperationException("DocumentDB requires VPC")
            appendLine(
                """
                resource "random_password" "${key}" {
                  length = 32
                  special = true
                  override_special = "-_"
                }
                resource "aws_docdb_subnet_group" "${key}" {
                  name = "$namePrefix-${key}"
                  subnet_ids = ${project.privateSubnets}
                }
                resource "aws_docdb_cluster_parameter_group" "${key}" {
                  family = "docdb4.0"
                  name = "$namePrefix-${key}-parameter-group"
                  parameter {
                    name = "tls"
                    value = "disabled"
                  }
                }
                resource "aws_docdb_cluster" "${key}" {
                  cluster_identifier = "${namePrefix}-${key}"
                  engine = "docdb"
                  master_username = "master"
                  master_password = random_password.${key}.result
                  backup_retention_period = 5
                  preferred_backup_window = "07:00-09:00"
                  skip_final_snapshot = true

                  db_cluster_parameter_group_name = "${'$'}{aws_docdb_cluster_parameter_group.${key}.name}"
                  vpc_security_group_ids = [aws_security_group.internal.id]
                  db_subnet_group_name = "${'$'}{aws_docdb_subnet_group.${key}.name}"
                }
                resource "aws_docdb_cluster_instance" "${key}" {
                  count = 1
                  identifier = "$namePrefix-${key}-${'$'}{count.index}"
                  cluster_identifier = "${'$'}{aws_docdb_cluster.${key}.id}"
                  instance_class = var.${key}_instance_class
                }
                """.trimIndent()
            )
        },
        settingOutput = { key ->
            """
            {
                url = "mongodb://master:${'$'}{random_password.${key}.result}@${'$'}{aws_docdb_cluster_instance.${key}[0].endpoint}/?retryWrites=false"
                databaseName = "${namePrefix}_${key}"
            }
            """.trimIndent()
        }
    )
    TerraformHandler.handler(
        name = "AuroraDB Serverless V1",
        priority = 1,
        inputs = { key ->
            listOf(
                TerraformInput.number("${key}_min_capacity", 2),
                TerraformInput.number("${key}_max_capacity", 4),
                TerraformInput.boolean("${key}_auto_pause", true)
            )
        },
        emit = {
            if (!project.vpc) throw UnsupportedOperationException("AuroraDB Serverless V1 requires VPC")
            appendLine(
                """
                resource "random_password" "${key}" {
                  length = 32
                  special = true
                  override_special = "-_"
                }
                resource "aws_db_subnet_group" "${key}" {
                  name = "$namePrefix-${key}"
                  subnet_ids = ${project.privateSubnets}
                }
                resource "aws_rds_cluster" "$key" {
                  cluster_identifier = "$namePrefix-${key}"
                  engine = "aurora-postgresql"
                  engine_mode = "serverless"
                  engine_version = "10.18"
                  database_name = "$namePrefixSafe${key}"
                  master_username = "master"
                  master_password = random_password.${key}.result
                  skip_final_snapshot = var.debug
                  final_snapshot_identifier = "$namePrefix-${key}"
                  enable_http_endpoint = true
                  vpc_security_group_ids = [aws_security_group.internal.id]
                  db_subnet_group_name = "${'$'}{aws_db_subnet_group.${key}.name}"

                  scaling_configuration {
                    auto_pause = var.${key}_auto_pause
                    min_capacity = var.${key}_min_capacity
                    max_capacity = var.${key}_max_capacity
                    seconds_until_auto_pause = 300
                    timeout_action = "ForceApplyCapacityChange"
                  }
                }
                """.trimIndent()
            )
        },
        settingOutput = { key ->
            """
            {
                url = "postgresql://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}"
            }
            """.trimIndent()
        }
    )
    TerraformHandler.handler(
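        /*
         * Handler selection, for reference: every settings key is generated by exactly
         * one named TerraformHandler. The choice is recorded per environment in the
         * "handlers" map of project.json; keys without an entry fall back to the
         * highest-priority handler registered for that setting's serializer (see
         * terraformEnvironmentAws below). A minimal sketch of such a file, assuming
         * hypothetical setting keys "database" and "cache":
         *   { "handlers": { "database": "AuroraDB Serverless V2", "cache": "DynamoDB" } }
         */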
name = "AuroraDB Serverless V2", - priority = 2, - inputs = { key -> - listOf( - TerraformInput.number("${key}_min_capacity", 0.5), - TerraformInput.number("${key}_max_capacity", 2), - TerraformInput.boolean("${key}_auto_pause", true) - ) - }, - emit = { - if (project.vpc) { - appendLine( - """ - resource "aws_db_subnet_group" "${key}" { - name = "$namePrefix-${key}" - subnet_ids = ${project.privateSubnets} - } - """.trimIndent() - ) - } - appendLine( - """ - resource "random_password" "${key}" { - length = 32 - special = true - override_special = "-_" - } - resource "aws_rds_cluster" "$key" { - cluster_identifier = "$namePrefix-${key}" - engine = "aurora-postgresql" - engine_mode = "provisioned" - engine_version = "13.6" - database_name = "$namePrefixSafe${key}" - master_username = "master" - master_password = random_password.${key}.result - skip_final_snapshot = var.debug - final_snapshot_identifier = "$namePrefix-${key}" - ${if (project.vpc) """vpc_security_group_ids = [aws_security_group.internal.id]""" else ""} - ${if (project.vpc) """db_subnet_group_name = "${'$'}{aws_db_subnet_group.${key}.name}"""" else ""} - - serverlessv2_scaling_configuration { - min_capacity = var.${key}_min_capacity - max_capacity = var.${key}_max_capacity - } - } - - resource "aws_rds_cluster_instance" "$key" { - publicly_accessible = ${!project.vpc} - cluster_identifier = aws_rds_cluster.$key.id - instance_class = "db.serverless" - engine = aws_rds_cluster.$key.engine - engine_version = aws_rds_cluster.$key.engine_version - ${if (project.vpc) """db_subnet_group_name = "${'$'}{aws_db_subnet_group.${key}.name}"""" else ""} - } - """.trimIndent() - ) - }, - settingOutput = { key -> - //url = "${'$'}{var.${key}_auto_pause ? "auroradb-autopause" : "postgresql"}://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}" - """ - { - url = "postgresql://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}" - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "MongoDB Serverless", - priority = 0, - providers = listOf(TerraformProvider.mongodbatlas), - inputs = { key -> - listOf( - TerraformInput.string("${key}_org_id", null), - TerraformInput.boolean("${key}_continuous_backup", false), -// TerraformInput.string("${key}_team_id", null) - ) - }, - emit = { - appendLine( - """ - resource "mongodbatlas_project" "$key" { - name = "$namePrefixSafe$key" - org_id = var.${key}_org_id - - is_collect_database_specifics_statistics_enabled = true - is_data_explorer_enabled = true - is_performance_advisor_enabled = true - is_realtime_performance_panel_enabled = true - is_schema_advisor_enabled = true - } - resource "random_password" "${key}" { - length = 32 - special = true - override_special = "-_" - } - resource "mongodbatlas_serverless_instance" "$key" { - project_id = mongodbatlas_project.$key.id - name = "$namePrefixSafe$key" - - provider_settings_backing_provider_name = "AWS" - provider_settings_provider_name = "SERVERLESS" - provider_settings_region_name = replace(upper(var.deployment_location), "-", "_") - - continuous_backup_enabled = var.${key}_continuous_backup - } - resource "mongodbatlas_database_user" "$key" { - username = "$namePrefixSafe$key-main" - password = random_password.$key.result - project_id = mongodbatlas_project.$key.id - auth_database_name = "admin" - - roles { - role_name = "readWrite" - database_name = "default" - } - - roles { - role_name = "readAnyDatabase" - database_name = 
"admin" - } - - } - """.trimIndent() - ) - if (project.vpc) { - appendLine( - """ - resource "mongodbatlas_project_ip_access_list" "$key" { - for_each = toset(${project.natGatewayIp}) - project_id = mongodbatlas_project.$key.id - cidr_block = "${'$'}{each.value}/32" - comment = "NAT Gateway" - } - """.trimIndent() - ) - } else { - appendLine( - """ - resource "mongodbatlas_project_ip_access_list" "$key" { - project_id = mongodbatlas_project.$key.id - cidr_block = "0.0.0.0/0" - comment = "Anywhere" - } - """.trimIndent() - ) - } - }, - settingOutput = { key -> - """ - { - url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_serverless_instance.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "MongoDB Dedicated", - priority = 0, - providers = listOf(TerraformProvider.mongodbatlas), - inputs = { key -> - listOf( - TerraformInput.string("${key}_org_id", null), - TerraformInput.string("${key}_min_size", "M10"), - TerraformInput.string("${key}_max_size", "M40") - ) - }, - emit = { - appendLine( - """ - resource "mongodbatlas_project" "$key" { - name = "$namePrefixSafe$key" - org_id = var.${key}_org_id - - is_collect_database_specifics_statistics_enabled = true - is_data_explorer_enabled = true - is_performance_advisor_enabled = true - is_realtime_performance_panel_enabled = true - is_schema_advisor_enabled = true - } - resource "random_password" "${key}" { - length = 32 - special = true - override_special = "-_" - } - resource "mongodbatlas_advanced_cluster" "database" { - project_id = mongodbatlas_project.database.id - name = "$namePrefixSafe$key" - cluster_type = "REPLICASET" - # lifecycle { ignore_changes = [instance_size] } - replication_specs { - region_configs { - auto_scaling { - compute_enabled = true - compute_min_instance_size = "M10" - compute_max_instance_size = var.${key}_max_size - compute_scale_down_enabled = true - disk_gb_enabled = true - } - electable_specs { - instance_size = var.${key}_min_size - node_count = 3 - } - analytics_specs { - instance_size = var.${key}_min_size - node_count = 1 - } - priority = 7 - provider_name = "AWS" - region_name = replace(upper(var.deployment_location), "-", "_") - } - } - } - resource "mongodbatlas_database_user" "$key" { - username = "$namePrefixSafe$key-main" - password = random_password.$key.result - project_id = mongodbatlas_project.$key.id - auth_database_name = "admin" - - roles { - role_name = "readWrite" - database_name = "default" - } - - roles { - role_name = "readAnyDatabase" - database_name = "admin" - } - - } - """.trimIndent() - ) - if (project.vpc) { - appendLine( - """ - resource "mongodbatlas_project_ip_access_list" "$key" { - for_each = toset(${project.natGatewayIp}) - project_id = mongodbatlas_project.$key.id - cidr_block = "${'$'}{each.value}/32" - comment = "NAT Gateway" - } - """.trimIndent() - ) - } else { - appendLine( - """ - resource "mongodbatlas_project_ip_access_list" "$key" { - project_id = mongodbatlas_project.$key.id - cidr_block = "0.0.0.0/0" - comment = "Anywhere" - } - """.trimIndent() - ) - } - }, - settingOutput = { key -> - """ - { - url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_advanced_cluster.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = 
"ElastiCache", - inputs = { key -> - listOf( - TerraformInput.string("${key}_node_type", "cache.t2.micro"), - TerraformInput.number("${key}_node_count", 1) - ) - }, - emit = { - if (!project.vpc) throw IllegalArgumentException("A VPC is required for ElastiCache for security purposes.") - appendLine( - """ - resource "aws_elasticache_cluster" "${key}" { - cluster_id = "${namePrefix}-${key}" - engine = "memcached" - node_type = var.${key}_node_type - num_cache_nodes = var.${key}_node_count - parameter_group_name = "default.memcached1.6" - port = 11211 - security_group_ids = [aws_security_group.internal.id] - subnet_group_name = aws_elasticache_subnet_group.${key}.name - } - resource "aws_elasticache_subnet_group" "${key}" { - name = "$namePrefix-${key}" - subnet_ids = ${project.privateSubnets} - } - """.trimIndent() - ) - }, - settingOutput = { key -> - """ - { - url = "memcached-aws://${'$'}{aws_elasticache_cluster.${key}.cluster_address}:11211" - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "DynamoDB", - priority = 1, - settingOutput = { _ -> - """ - { - url = "dynamodb://${'$'}{var.deployment_location}/${namePrefixUnderscores}" - } - """.trimIndent() - } - ) - TerraformHandler.handler( - inputs = { key -> - listOf( - TerraformInput.string("${key}_expiration", "PT8760H"), - TerraformInput.string("${key}_emailExpiration", "PT1H"), - ) - }, - emit = { - appendLine( - """ - resource "random_password" "${key}" { - length = 32 - special = true - override_special = "!#${'$'}%&*()-_=+[]{}<>:?" - } - """.trimIndent() - ) - }, - settingOutput = { key -> - """ - { - expiration = var.${key}_expiration - emailExpiration = var.${key}_emailExpiration - secret = random_password.${key}.result - } - """.trimIndent() - } - ) - TerraformHandler.handler( - inputs = { _ -> - listOf( - ) - }, - emit = { - appendLine( - """ - resource "random_password" "${key}" { - length = 88 - special = true - override_special = "+/" - } - """.trimIndent() - ) - }, - settingOutput = { key -> - """ - random_password.${key}.result - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "Cloudwatch", - inputs = { key -> - listOf( - TerraformInput.stringList("${key}_tracked", MetricType.known.map { it.name }), - TerraformInput.string("${key}_namespace", this.projectName), - ) - }, - emit = { - appendLine( - """ - resource "aws_iam_policy" "${key}" { - name = "${namePrefix}-${key}" - path = "/${namePrefixPath}/${key}/" - description = "Access to publish metrics" - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = [ - "cloudwatch:PutMetricData", - ] - Effect = "Allow" - Condition = { - StringEquals = { - "cloudwatch:namespace": var.${key}_namespace - } - } - Resource = ["*"] - }, - ] - }) - } - resource "aws_iam_role_policy_attachment" "${key}" { - role = aws_iam_role.main_exec.name - policy_arn = aws_iam_policy.${key}.arn - } - """.trimIndent() - ) - }, - settingOutput = { key -> - """ - { - url = "cloudwatch://${'$'}{var.deployment_location}/${'$'}{var.${key}_namespace}" - trackingByEntryPoint = var.${key}_tracked - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "SMTP through SES", - inputs = { key -> - if (domain) { - listOf( - TerraformInput.string("reporting_email", null) - ) - } else { - listOf( - TerraformInput.string("${key}_sender", null) - ) - } - }, - emit = { - appendLine( - """ - resource "aws_iam_user" "${key}" { - name = "${namePrefix}-${key}-user" - } - - resource "aws_iam_access_key" "${key}" { - user = aws_iam_user.${key}.name - } - - data 
"aws_iam_policy_document" "${key}" { - statement { - actions = ["ses:SendRawEmail"] - resources = ["*"] - } - } - - resource "aws_iam_policy" "${key}" { - name = "${namePrefix}-${key}-policy" - description = "Allows sending of e-mails via Simple Email Service" - policy = data.aws_iam_policy_document.${key}.json - } - - resource "aws_iam_user_policy_attachment" "${key}" { - user = aws_iam_user.${key}.name - policy_arn = aws_iam_policy.${key}.arn - } - - """.trimIndent() - ) - - if (project.vpc) { - appendLine( - """ - resource "aws_security_group" "${key}" { - name = "${namePrefix}-${key}" - vpc_id = ${project.vpc_id} - - ingress { - from_port = 587 - to_port = 587 - protocol = "tcp" - cidr_blocks = [${project.vpc_cidr_block}] - } - } - resource "aws_vpc_endpoint" "${key}" { - vpc_id = ${project.vpc_id} - service_name = "com.amazonaws.${'$'}{var.deployment_location}.email-smtp" - security_group_ids = [aws_security_group.${key}.id] - vpc_endpoint_type = "Interface" - } - """.trimIndent() - ) - } - - if (project.domain) { - appendLine( - """ - resource "aws_ses_domain_identity" "${key}" { - domain = var.domain_name - } - resource "aws_ses_domain_mail_from" "$key" { - domain = aws_ses_domain_identity.$key.domain - mail_from_domain = "mail.${'$'}{var.domain_name}" - } - resource "aws_route53_record" "${key}_mx" { - zone_id = data.aws_route53_zone.main.zone_id - name = aws_ses_domain_mail_from.$key.mail_from_domain - type = "MX" - ttl = "600" - records = ["10 feedback-smtp.${'$'}{var.deployment_location}.amazonses.com"] # Change to the region in which `aws_ses_domain_identity.example` is created - } - resource "aws_route53_record" "${key}" { - zone_id = data.aws_route53_zone.main.zone_id - name = "_amazonses.${'$'}{var.domain_name}" - type = "TXT" - ttl = "600" - records = [aws_ses_domain_identity.${key}.verification_token] - } - resource "aws_ses_domain_dkim" "${key}_dkim" { - domain = aws_ses_domain_identity.${key}.domain - } - resource "aws_route53_record" "${key}_spf_mail_from" { - zone_id = data.aws_route53_zone.main.zone_id - name = aws_ses_domain_mail_from.$key.mail_from_domain - type = "TXT" - ttl = "300" - records = [ - "v=spf1 include:amazonses.com -all" - ] - } - resource "aws_route53_record" "${key}_spf_domain" { - zone_id = data.aws_route53_zone.main.zone_id - name = aws_ses_domain_identity.$key.domain - type = "TXT" - ttl = "300" - records = [ - "v=spf1 include:amazonses.com -all" - ] - } - resource "aws_route53_record" "${key}_dkim_records" { - count = 3 - zone_id = data.aws_route53_zone.main.zone_id - name = "${'$'}{element(aws_ses_domain_dkim.${key}_dkim.dkim_tokens, count.index)}._domainkey.${'$'}{var.domain_name}" - type = "CNAME" - ttl = "300" - records = [ - "${'$'}{element(aws_ses_domain_dkim.${key}_dkim.dkim_tokens, count.index)}.dkim.amazonses.com", - ] - } - resource "aws_route53_record" "${key}_route_53_dmarc_txt" { - zone_id = data.aws_route53_zone.main.zone_id - name = "_dmarc.${'$'}{var.domain_name}" - type = "TXT" - ttl = "300" - records = [ - "v=DMARC1;p=quarantine;pct=75;rua=mailto:${'$'}{var.reporting_email}" - ] - } - """.trimIndent() - ) - } else { - appendLine( - """ - resource "aws_ses_email_identity" "${key}" { - email = var.${key}_sender - } - """.trimIndent() - ) - } - }, - settingOutput = { key -> - """ - { - url = "smtp://${'$'}{aws_iam_access_key.${key}.id}:${'$'}{aws_iam_access_key.${key}.ses_smtp_password_v4}@email-smtp.${'$'}{var.deployment_location}.amazonaws.com:587" - fromEmail = ${if (domain) "\"noreply@${'$'}{var.domain_name}\"" else 
"var.${key}_sender"} - } - """.trimIndent() - } - ) - TerraformHandler.handler( - name = "SMTP through SES with Existing Identity", - inputs = { key -> - listOf( - TerraformInput.string("${key}_sender", null) - ) - }, - emit = { - appendLine( - """ - resource "aws_iam_user" "${key}" { - name = "${namePrefix}-${key}-user" - } - - resource "aws_iam_access_key" "${key}" { - user = aws_iam_user.${key}.name - } - - data "aws_iam_policy_document" "${key}" { - statement { - actions = ["ses:SendRawEmail"] - resources = ["*"] - } - } - - resource "aws_iam_policy" "${key}" { - name = "${namePrefix}-${key}-policy" - description = "Allows sending of e-mails via Simple Email Service" - policy = data.aws_iam_policy_document.${key}.json - } - - resource "aws_iam_user_policy_attachment" "${key}" { - user = aws_iam_user.${key}.name - policy_arn = aws_iam_policy.${key}.arn - } - - """.trimIndent() - ) - - if (project.vpc) { - appendLine( - """ - resource "aws_security_group" "${key}" { - name = "${namePrefix}-${key}" - vpc_id = ${project.vpc_id} - - ingress { - from_port = 587 - to_port = 587 - protocol = "tcp" - cidr_blocks = [${project.vpc_cidr_block}] - } - } - resource "aws_vpc_endpoint" "${key}" { - vpc_id = ${project.vpc_id} - service_name = "com.amazonaws.${'$'}{var.deployment_location}.email-smtp" - security_group_ids = [aws_security_group.${key}.id] - vpc_endpoint_type = "Interface" - } - """.trimIndent() - ) - } - }, - settingOutput = { key -> - """ - { - url = "smtp://${'$'}{aws_iam_access_key.${key}.id}:${'$'}{aws_iam_access_key.${key}.ses_smtp_password_v4}@email-smtp.${'$'}{var.deployment_location}.amazonaws.com:587" - fromEmail = var.${key}_sender - } - """.trimIndent() - } - ) -} - -internal fun defaultAwsHandler(project: TerraformProjectInfo) = with(project) { - TerraformSection( - name = "cloud", - inputs = listOf( - TerraformInput.string( - "deployment_location", - "us-west-2", - description = "The AWS region key to deploy all resources in." - ), - TerraformInput.boolean( - "debug", - false, - description = "The GeneralSettings debug. Debug true will turn on various things during run time for easier development and bug tracking. Should be false for production environments." - ), - TerraformInput.string("ip_prefix", "10.0"), - ) + (if (domain) listOf( - TerraformInput.string( - "domain_name_zone", - null, - description = "The AWS Hosted zone the domain will be placed under." - ), - TerraformInput.string("domain_name", null, description = "The domain the server will be hosted at.") - ) else listOf()) + (if (vpc && existingVpc) listOf( - TerraformInput.string( - "vpc_id", - null, - description = "The AWS VPC id that you want your resources to be placed under." 
- ), - TerraformInput.stringList("vpc_private_subnets", null), - TerraformInput.stringList("vpc_nat_gateways", null), - ) else listOf()), - emit = { - if (vpc) { - if (existingVpc) { - appendLine( - """ - data "aws_vpc" "main" { - id = var.vpc_id - } - data "aws_subnet" "private" { - for_each = toset(var.vpc_private_subnets) - id = each.value - } - data "aws_nat_gateway" "main" { - for_each = toset(var.vpc_nat_gateways) - id = each.value - } - """.trimIndent() - ) - } else { - appendLine( - """ - module "vpc" { - source = "terraform-aws-modules/vpc/aws" - version = "4.0.2" - - name = "$namePrefix" - cidr = "${'$'}{var.ip_prefix}.0.0/16" - - azs = ["${'$'}{var.deployment_location}a", "${'$'}{var.deployment_location}b", "${'$'}{var.deployment_location}c"] - private_subnets = ["${'$'}{var.ip_prefix}.1.0/24", "${'$'}{var.ip_prefix}.2.0/24", "${'$'}{var.ip_prefix}.3.0/24"] - public_subnets = ["${'$'}{var.ip_prefix}.101.0/24", "${'$'}{var.ip_prefix}.102.0/24", "${'$'}{var.ip_prefix}.103.0/24"] - - enable_nat_gateway = true - single_nat_gateway = true - enable_vpn_gateway = false - enable_dns_hostnames = false - enable_dns_support = true - } - """.trimIndent() - ) - } - appendLine( - """ - - resource "aws_vpc_endpoint" "s3" { - vpc_id = ${project.vpc_id} - service_name = "com.amazonaws.${'$'}{var.deployment_location}.s3" - route_table_ids = ${project.public_route_table_ids} - } - resource "aws_vpc_endpoint" "executeapi" { - vpc_id = ${project.vpc_id} - service_name = "com.amazonaws.${'$'}{var.deployment_location}.execute-api" - security_group_ids = [aws_security_group.executeapi.id] - vpc_endpoint_type = "Interface" - } - resource "aws_vpc_endpoint" "lambdainvoke" { - vpc_id = ${project.vpc_id} - service_name = "com.amazonaws.${'$'}{var.deployment_location}.lambda" - security_group_ids = [aws_security_group.lambdainvoke.id] - vpc_endpoint_type = "Interface" - } - - resource "aws_security_group" "internal" { - name = "$namePrefix-private" - vpc_id = ${project.vpc_id} - - ingress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ${project.subnet_cidr_blocks} - } - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ${project.subnet_cidr_blocks} - } - } - - resource "aws_security_group" "access_outside" { - name = "$namePrefix-access-outside" - vpc_id = ${project.vpc_id} - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - } - - resource "aws_security_group" "executeapi" { - name = "$namePrefix-execute-api" - vpc_id = ${project.vpc_id} - - ingress { - from_port = 443 - to_port = 443 - protocol = "tcp" - cidr_blocks = [${project.vpc_cidr_block}] - } - } - - resource "aws_security_group" "lambdainvoke" { - name = "$namePrefix-lambda-invoke" - vpc_id = ${project.vpc_id} - - ingress { - from_port = 443 - to_port = 443 - protocol = "tcp" - cidr_blocks = [${project.vpc_cidr_block}] - } - } - """.trimIndent() - ) - } - appendLine( - """ - - resource "aws_api_gateway_account" "main" { - cloudwatch_role_arn = aws_iam_role.cloudwatch.arn - } - - resource "aws_iam_role" "cloudwatch" { - name = "${namePrefixSafe}" - - assume_role_policy = < { - val utcTime = - LocalDateTime(LocalDate(2001, 1, 1), s.time).toInstant(s.zone).toLocalDateTime(TimeZone.UTC) - TerraformSection( - name = "schedule_${it.name}", - emit = { - appendLine( - """ - resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" { - name = "${namePrefix}_${safeName}" - schedule_expression = "cron(${utcTime.minute} ${utcTime.hour} * * ? 
*)" - } - resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" { - rule = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name - target_id = "lambda" - arn = aws_lambda_alias.main.arn - input = "{\"scheduled\": \"${it.name}\"}" - } - resource "aws_lambda_permission" "scheduled_task_${safeName}" { - action = "lambda:InvokeFunction" - function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" - principal = "events.amazonaws.com" - source_arn = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn - lifecycle { - create_before_destroy = $createBeforeDestroy - } - } - """.trimIndent() - ) - } - ) - } - - is Schedule.Frequency -> { - TerraformSection( - name = "schedule_${it.name}", - emit = { - appendLine( - """ - resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" { - name = "${namePrefix}_${safeName}" - schedule_expression = "rate(${s.gap.inWholeMinutes} minute${if (s.gap.inWholeMinutes > 1) "s" else ""})" - } - resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" { - rule = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name - target_id = "lambda" - arn = aws_lambda_alias.main.arn - input = "{\"scheduled\": \"${it.name}\"}" - } - resource "aws_lambda_permission" "scheduled_task_${safeName}" { - action = "lambda:InvokeFunction" - function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" - principal = "events.amazonaws.com" - source_arn = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn - lifecycle { - create_before_destroy = $createBeforeDestroy - } - } - """.trimIndent() - ) - } - ) - } - - is Schedule.Cron -> { - TerraformSection( - name = "schedule_${it.name}", - emit = { - appendLine( - """ - resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" { - name = "${namePrefix}_${safeName}" - schedule_expression = "cron(${s.cron})" - } - resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" { - rule = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name - target_id = "lambda" - arn = aws_lambda_alias.main.arn - input = "{\"scheduled\": \"${it.name}\"}" - } - resource "aws_lambda_permission" "scheduled_task_${safeName}" { - action = "lambda:InvokeFunction" - function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" - principal = "events.amazonaws.com" - source_arn = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn - lifecycle { - create_before_destroy = $createBeforeDestroy - } - } - """.trimIndent() - ) - } - ) - } - } - } -} - -internal fun awsLambdaHandler( - project: TerraformProjectInfo, - handlerFqn: String, - otherSections: List, -) = TerraformSection( - name = "lambda", - inputs = listOf( - TerraformInput.number( - "lambda_memory_size", - 1024, - description = "The amount of ram available (in Megabytes) to the virtual machine running in Lambda." - ), - TerraformInput.number( - "lambda_timeout", - 30, - description = "How long an individual lambda invocation can run before forcefully being shut down." - ), - TerraformInput.boolean( - "lambda_snapstart", - false, - description = "Whether or not lambda will deploy with SnapStart which compromises deploy time for shorter cold start time." 
- ), - ), - emit = { - appendLine(""" - resource "aws_s3_bucket" "lambda_bucket" { - bucket_prefix = "${project.namePrefixPathSegment}-lambda-bucket" - force_destroy = true - } - - resource "aws_iam_role" "main_exec" { - name = "${project.namePrefix}-main-exec" - - assume_role_policy = jsonencode({ - Version = "2012-10-17" - Statement = [{ - Action = "sts:AssumeRole" - Effect = "Allow" - Sid = "" - Principal = { - Service = "lambda.amazonaws.com" - } - } - ] - }) - } - - resource "aws_iam_policy" "bucketDynamoAndInvoke" { - name = "${project.namePrefix}-bucketDynamoAndInvoke" - path = "/${project.namePrefixPath}/bucketDynamoAndInvoke/" - description = "Access to the ${project.namePrefix} bucket, dynamo, and invoke" - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = [ - "s3:GetObject", - ] - Effect = "Allow" - Resource = [ - "${'$'}{aws_s3_bucket.lambda_bucket.arn}", - "${'$'}{aws_s3_bucket.lambda_bucket.arn}/*", - ] - }, - { - Action = [ - "dynamodb:*", - ] - Effect = "Allow" - Resource = ["*"] - }, - { - Action = [ - "lambda:InvokeFunction", - ] - Effect = "Allow" - Resource = "*" - }, - ] - }) - } - - resource "aws_iam_role_policy_attachment" "bucketDynamoAndInvoke" { - role = aws_iam_role.main_exec.name - policy_arn = aws_iam_policy.bucketDynamoAndInvoke.arn - } - resource "aws_iam_role_policy_attachment" "main_policy_exec" { - role = aws_iam_role.main_exec.name - policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" - } - resource "aws_iam_role_policy_attachment" "main_policy_vpc" { - role = aws_iam_role.main_exec.name - policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" - } - resource "aws_iam_role_policy_attachment" "insights_policy" { - role = aws_iam_role.main_exec.id - policy_arn = "arn:aws:iam::aws:policy/CloudWatchLambdaInsightsExecutionRolePolicy" - } - - resource "aws_s3_object" "app_storage" { - bucket = aws_s3_bucket.lambda_bucket.id - - key = "lambda-functions.zip" - source = data.archive_file.lambda.output_path - - source_hash = data.archive_file.lambda.output_md5 - depends_on = [data.archive_file.lambda] - } - - resource "aws_lambda_function" "main" { - function_name = "${project.namePrefix}-main" - publish = var.lambda_snapstart - - s3_bucket = aws_s3_bucket.lambda_bucket.id - s3_key = aws_s3_object.app_storage.key - - runtime = "java17" - handler = "$handlerFqn" - - memory_size = "${'$'}{var.lambda_memory_size}" - timeout = var.lambda_timeout - # memory_size = "1024" - - source_code_hash = data.archive_file.lambda.output_base64sha256 - - role = aws_iam_role.main_exec.arn - - snap_start { - apply_on = "PublishedVersions" - } - - ${ - if (project.vpc) - """ - | vpc_config { - | subnet_ids = ${project.privateSubnets} - | security_group_ids = [aws_security_group.internal.id, aws_security_group.access_outside.id] - | } - """.trimMargin() - else - "" - } - - environment { - variables = { - LIGHTNING_SERVER_SETTINGS_DECRYPTION = random_password.settings.result - } - } - - depends_on = [aws_s3_object.app_storage] - } - - resource "aws_lambda_alias" "main" { - name = "prod" - description = "The current production version of the lambda." - function_name = aws_lambda_function.main.arn - function_version = var.lambda_snapstart ? 
aws_lambda_function.main.version : "${'$'}LATEST" - } - - resource "aws_cloudwatch_log_group" "main" { - name = "${project.namePrefix}-main-log" - retention_in_days = 30 - } - - resource "local_sensitive_file" "settings_raw" { - content = jsonencode({ - ${ - otherSections.mapNotNull { it.toLightningServer }.flatMap { it.entries }.map { "${it.key} = ${it.value}" } - .map { it.replace("\n", "\n ") }.joinToString("\n ") - }}) - filename = "${'$'}{path.module}/build/raw-settings.json" - } - - locals { - # Directories start with "C:..." on Windows; All other OSs use "/" for root. - is_windows = substr(pathexpand("~"), 0, 1) == "/" ? false : true - } - resource "null_resource" "lambda_jar_source" { - triggers = { - always = timestamp() - } - provisioner "local-exec" { - command = (local.is_windows ? "if(test-path \"${'$'}{path.module}/build/lambda/\") { rd -Recurse \"${'$'}{path.module}/build/lambda/\" }" : "rm -rf \"${'$'}{path.module}/build/lambda/\"") - interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] - } - provisioner "local-exec" { - command = (local.is_windows ? "cp -r -force \"${'$'}{path.module}/../../build/dist/lambda/.\" \"${'$'}{path.module}/build/lambda/\"" : "cp -rf \"${'$'}{path.module}/../../build/dist/lambda/.\" \"${'$'}{path.module}/build/lambda/\"") - interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] - } - provisioner "local-exec" { - command = "openssl enc -aes-256-cbc -md sha256 -in \"${'$'}{local_sensitive_file.settings_raw.filename}\" -out \"${'$'}{path.module}/build/lambda/settings.enc\" -pass pass:${'$'}{random_password.settings.result}" - interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] - } - } - resource "null_resource" "settings_reread" { - triggers = { - settingsRawHash = local_sensitive_file.settings_raw.content - } - depends_on = [null_resource.lambda_jar_source] - provisioner "local-exec" { - command = "openssl enc -d -aes-256-cbc -md sha256 -out \"${'$'}{local_sensitive_file.settings_raw.filename}.decrypted.json\" -in \"${'$'}{path.module}/build/lambda/settings.enc\" -pass pass:${'$'}{random_password.settings.result}" - interpreter = local.is_windows ? 
["PowerShell", "-Command"] : [] - } - } - - resource "random_password" "settings" { - length = 32 - special = true - override_special = "_" - } - - data "archive_file" "lambda" { - depends_on = [null_resource.lambda_jar_source, null_resource.settings_reread] - type = "zip" - source_dir = "${'$'}{path.module}/build/lambda" - output_path = "${'$'}{path.module}/build/lambda.jar" - } - - - """.trimIndent() - ) - } -) - -internal fun httpAwsHandler(projectInfo: TerraformProjectInfo) = TerraformSection( - name = "http", - emit = { - appendLine( - """ - resource "aws_apigatewayv2_api" "http" { - name = "${projectInfo.namePrefix}-http" - protocol_type = "HTTP" - } - - resource "aws_apigatewayv2_stage" "http" { - api_id = aws_apigatewayv2_api.http.id - - name = "${projectInfo.namePrefix}-gateway-stage" - auto_deploy = true - - access_log_settings { - destination_arn = aws_cloudwatch_log_group.http_api.arn - - format = jsonencode({ - requestId = "${'$'}context.requestId" - sourceIp = "${'$'}context.identity.sourceIp" - requestTime = "${'$'}context.requestTime" - protocol = "${'$'}context.protocol" - httpMethod = "${'$'}context.httpMethod" - resourcePath = "${'$'}context.resourcePath" - routeKey = "${'$'}context.routeKey" - status = "${'$'}context.status" - responseLength = "${'$'}context.responseLength" - integrationErrorMessage = "${'$'}context.integrationErrorMessage" - } - ) - } - } - - resource "aws_apigatewayv2_integration" "http" { - api_id = aws_apigatewayv2_api.http.id - - integration_uri = aws_lambda_alias.main.invoke_arn - integration_type = "AWS_PROXY" - integration_method = "POST" - } - - resource "aws_cloudwatch_log_group" "http_api" { - name = "${projectInfo.namePrefix}-http-gateway-log" - - retention_in_days = 30 - } - - resource "aws_apigatewayv2_route" "http" { - api_id = aws_apigatewayv2_api.http.id - route_key = "${'$'}default" - target = "integrations/${'$'}{aws_apigatewayv2_integration.http.id}" - } - - resource "aws_lambda_permission" "api_gateway_http" { - action = "lambda:InvokeFunction" - function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" - principal = "apigateway.amazonaws.com" - - source_arn = "${'$'}{aws_apigatewayv2_api.http.execution_arn}/*/*" - lifecycle { - create_before_destroy = ${projectInfo.createBeforeDestroy} - } - } - """.trimIndent() - ) - if (projectInfo.domain) { - appendLine( - """ - resource "aws_acm_certificate" "http" { - domain_name = var.domain_name - validation_method = "DNS" - } - resource "aws_route53_record" "http" { - zone_id = data.aws_route53_zone.main.zone_id - name = tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_name - type = tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_type - records = [tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_value] - ttl = "300" - } - resource "aws_acm_certificate_validation" "http" { - certificate_arn = aws_acm_certificate.http.arn - validation_record_fqdns = [aws_route53_record.http.fqdn] - } - resource aws_apigatewayv2_domain_name http { - domain_name = var.domain_name - domain_name_configuration { - certificate_arn = aws_acm_certificate.http.arn - endpoint_type = "REGIONAL" - security_policy = "TLS_1_2" - } - depends_on = [aws_acm_certificate_validation.http] - } - resource aws_apigatewayv2_api_mapping http { - stage = aws_apigatewayv2_stage.http.id - api_id = aws_apigatewayv2_stage.http.api_id - domain_name = aws_apigatewayv2_domain_name.http.domain_name - } - resource 
aws_route53_record httpAccess { - type = "A" - name = aws_apigatewayv2_domain_name.http.domain_name - zone_id = data.aws_route53_zone.main.id - alias { - evaluate_target_health = false - name = aws_apigatewayv2_domain_name.http.domain_name_configuration[0].target_domain_name - zone_id = aws_apigatewayv2_domain_name.http.domain_name_configuration[0].hosted_zone_id - } - } - """.trimIndent() - ) - } - }, - outputs = listOf( - TerraformOutput("http_url", "aws_apigatewayv2_stage.http.invoke_url"), - TerraformOutput( - "http", """ - { - id = aws_apigatewayv2_stage.http.id - api_id = aws_apigatewayv2_stage.http.api_id - invoke_url = aws_apigatewayv2_stage.http.invoke_url - arn = aws_apigatewayv2_stage.http.arn - name = aws_apigatewayv2_stage.http.name - } - """.trimIndent() - ), - ) -) - -internal fun wsAwsHandler(projectInfo: TerraformProjectInfo) = TerraformSection( - name = "websockets", - emit = { - appendLine( - """ - resource "aws_apigatewayv2_api" "ws" { - name = "${projectInfo.namePrefix}-gateway" - protocol_type = "WEBSOCKET" - route_selection_expression = "constant" - } - - resource "aws_apigatewayv2_stage" "ws" { - api_id = aws_apigatewayv2_api.ws.id - - name = "${projectInfo.namePrefix}-gateway-stage" - auto_deploy = true - - access_log_settings { - destination_arn = aws_cloudwatch_log_group.ws_api.arn - - format = jsonencode({ - requestId = "${'$'}context.requestId" - sourceIp = "${'$'}context.identity.sourceIp" - requestTime = "${'$'}context.requestTime" - protocol = "${'$'}context.protocol" - httpMethod = "${'$'}context.httpMethod" - resourcePath = "${'$'}context.resourcePath" - routeKey = "${'$'}context.routeKey" - status = "${'$'}context.status" - responseLength = "${'$'}context.responseLength" - integrationErrorMessage = "${'$'}context.integrationErrorMessage" - } - ) - } - } - - resource "aws_apigatewayv2_integration" "ws" { - api_id = aws_apigatewayv2_api.ws.id - - integration_uri = aws_lambda_alias.main.invoke_arn - integration_type = "AWS_PROXY" - integration_method = "POST" - } - - resource "aws_cloudwatch_log_group" "ws_api" { - name = "${projectInfo.namePrefix}-ws-gateway-log" - - retention_in_days = 30 - } - - resource "aws_apigatewayv2_route" "ws_connect" { - api_id = aws_apigatewayv2_api.ws.id - - route_key = "${'$'}connect" - target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" - } - resource "aws_apigatewayv2_route" "ws_default" { - api_id = aws_apigatewayv2_api.ws.id - - route_key = "${'$'}default" - target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" - } - resource "aws_apigatewayv2_route" "ws_disconnect" { - api_id = aws_apigatewayv2_api.ws.id - - route_key = "${'$'}disconnect" - target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" - } - - resource "aws_lambda_permission" "api_gateway_ws" { - action = "lambda:InvokeFunction" - function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" - principal = "apigateway.amazonaws.com" - - source_arn = "${'$'}{aws_apigatewayv2_api.ws.execution_arn}/*/*" - lifecycle { - create_before_destroy = ${projectInfo.createBeforeDestroy} - } - } - - resource "aws_iam_policy" "api_gateway_ws" { - name = "${projectInfo.namePrefix}-api_gateway_ws" - path = "/${projectInfo.namePrefixPath}/api_gateway_ws/" - description = "Access to the ${projectInfo.namePrefix}_api_gateway_ws management" - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = [ - "execute-api:ManageConnections" - ] - Effect = "Allow" - Resource = "*" - }, - ] - }) - } 
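
    # This policy grants the Lambda permission to call the API Gateway
    # management API (execute-api:ManageConnections), which is how the server
    # pushes messages out to connected websocket clients; it is attached to the
    # main execution role just below.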
- resource "aws_iam_role_policy_attachment" "api_gateway_ws" { - role = aws_iam_role.main_exec.name - policy_arn = aws_iam_policy.api_gateway_ws.arn - } - """.trimIndent() - ) - if (projectInfo.domain) { - appendLine( - """ - resource "aws_acm_certificate" "ws" { - domain_name = "ws.${'$'}{var.domain_name}" - validation_method = "DNS" - } - resource "aws_route53_record" "ws" { - zone_id = data.aws_route53_zone.main.zone_id - name = tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_name - type = tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_type - records = [tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_value] - ttl = "300" - } - resource "aws_acm_certificate_validation" "ws" { - certificate_arn = aws_acm_certificate.ws.arn - validation_record_fqdns = [aws_route53_record.ws.fqdn] - } - resource aws_apigatewayv2_domain_name ws { - domain_name = "ws.${'$'}{var.domain_name}" - domain_name_configuration { - certificate_arn = aws_acm_certificate.ws.arn - endpoint_type = "REGIONAL" - security_policy = "TLS_1_2" - } - depends_on = [aws_acm_certificate_validation.ws] - } - resource aws_apigatewayv2_api_mapping ws { - stage = aws_apigatewayv2_stage.ws.id - api_id = aws_apigatewayv2_stage.ws.api_id - domain_name = aws_apigatewayv2_domain_name.ws.domain_name - } - resource aws_route53_record wsAccess { - type = "A" - name = aws_apigatewayv2_domain_name.ws.domain_name - zone_id = data.aws_route53_zone.main.id - alias { - evaluate_target_health = false - name = aws_apigatewayv2_domain_name.ws.domain_name_configuration[0].target_domain_name - zone_id = aws_apigatewayv2_domain_name.ws.domain_name_configuration[0].hosted_zone_id - } - } - """.trimIndent() - ) - } - }, - outputs = listOf( - TerraformOutput("ws_url", "aws_apigatewayv2_stage.ws.invoke_url"), - TerraformOutput( - "ws", """ - { - id = aws_apigatewayv2_stage.ws.id - api_id = aws_apigatewayv2_stage.ws.api_id - invoke_url = aws_apigatewayv2_stage.ws.invoke_url - arn = aws_apigatewayv2_stage.ws.arn - name = aws_apigatewayv2_stage.ws.name - } - """.trimIndent() - ), - ) -) - -fun terraformMigrate(handlerFqn: String, folder: File) { - val newFolder = folder - val oldFolder = folder.parentFile!!.resolve(folder.name + "-old") - folder.renameTo(oldFolder) - newFolder.mkdirs() - try { - - val handlerFile = oldFolder.resolve("handlers.properties") - val handlerNames: Properties = handlerFile - .takeIf { it.exists() } - ?.let { Properties().apply { it.inputStream().use { s -> load(s) } } } - ?: Properties() - - val oldBaseTfText = oldFolder.resolve("base/main.tf").readText() - - for (environmentOld in oldFolder.listFiles()!!) 
{ - if (!environmentOld.isDirectory) continue - if (environmentOld.name == "base") continue - if (environmentOld.name == "domain") continue - if (environmentOld.name == "nodomain") continue - val environmentNew = newFolder.resolve(environmentOld.name) - environmentNew.mkdirs() - println("AWS Profile for ${environmentOld.name}:") - val profile = readln() - val oldTfText = environmentOld.resolve("main.tf").readText() - val info = TerraformProjectInfo( - projectName = oldBaseTfText - .substringAfter("name") - .substringAfter('=') - .trim() - .substringBefore("$") - .trim('"') - .trim('-') + "-" + oldTfText - .substringAfter("deployment_name") - .substringAfter('=') - .substringAfter('"') - .trim() - .substringBefore('"') - .trim('-'), - bucket = oldTfText - .substringAfter("bucket") - .substringAfter('=') - .substringAfter('"') - .trim() - .substringBefore('"'), - bucketPathOverride = oldTfText - .substringAfter("key") - .substringAfter('=') - .substringAfter('"') - .trim() - .substringBefore('"'), - vpc = false, - domain = oldTfText.contains("../domain"), - profile = profile, - handlers = handlerNames.keys.filterIsInstance().associateWith { handlerNames.getProperty(it) } - ) - val projectInfoFile = environmentNew.resolve("project.json") - @Suppress("JSON_FORMAT_REDUNDANT") - projectInfoFile.writeText( - Json(Serialization.Internal.json) { prettyPrint = true } - .encodeToString(TerraformProjectInfo.serializer(), info) - ) - val oldStateFile = environmentNew.resolve("oldstate.json") - assert(ProcessBuilder() - .directory(environmentOld) - .apply { environment()["AWS_PROFILE"] = profile } - .command("terraform", "state", "pull") - .inheritIO() - .redirectOutput(oldStateFile) - .start() - .waitFor() == 0) - environmentNew.resolve("terraform.tfvars").takeIf { !it.exists() }?.writeText( - oldTfText.substringAfter("module \"domain\" {").trim().removeSuffix("}") - ) - terraformEnvironmentAws(handlerFqn, environmentNew) - println("For $environmentNew:") - println(" - Clean up terraform.tfvars") - println(" - Run `./tf init && ./tf state push newstate.json` to import a migrated state") - } - } catch (e: Exception) { - newFolder.deleteRecursively() - oldFolder.renameTo(newFolder) - } -} - -fun terraformAws(handlerFqn: String, projectName: String = "project", root: File) { - if (root.resolve("base").exists()) { - println("Base folder detected; need to migrate to new Terraform format.") - println("***WARNING***") - println("You *MUST* rebuild your program to use the new terraform due to a new settings parser!") - println("Ensure the new AwsHandler uses 'loadSettings(AwsHandler::class.java)' to load settings.") - println("Enter 'understood' to proceed.") - assert(readln().equals("understood", true)) - terraformMigrate(handlerFqn, root) - return - } - root.mkdirs() - root.listFiles()!!.filter { it.isDirectory }.plus( - root.resolve("example") - ).distinct().forEach { terraformEnvironmentAws(handlerFqn, it, projectName) } -} - -fun terraformEnvironmentAws(handlerFqn: String, folder: File, projectName: String = "project") { - handlers() - val projectInfoFile = folder.resolve("project.json") - folder.mkdirs() - val defaultHandlers = Settings.requirements.entries.associate { - it.key to (TerraformHandler.handlers[it.value.serializer]?.maxBy { it.value.priority }?.key ?: "Direct") - } - val info = projectInfoFile - .takeIf { it.exists() } - ?.readText() - ?.let { Serialization.Internal.json.decodeFromString(TerraformProjectInfo.serializer(), it) } - ?.let { it.copy(handlers = defaultHandlers + it.handlers) } - 
?: TerraformProjectInfo( - projectName = projectName, - bucket = "your-deployment-bucket", - vpc = false, - domain = true, - profile = "default", - handlers = defaultHandlers, - ) - @Suppress("JSON_FORMAT_REDUNDANT") - projectInfoFile.writeText( - Json(Serialization.Internal.json) { prettyPrint = true } - .encodeToString(TerraformProjectInfo.serializer(), info) - ) - - val sections = listOf( - listOf(defaultAwsHandler(info)), - Settings.requirements.values.map { - val handler = TerraformHandler.handlers[it.serializer]?.let { handlers -> - val handlerName = info.handlers.get(it.name) - if (handlerName == "Direct") return@let null - handlers[handlerName]!! - } - handler?.makeSection?.invoke(info, it.name) ?: TerraformSection.default(it) - }, - listOfNotNull( -// if(Http.endpoints.isNotEmpty()) httpAwsHandler(info) else null, - httpAwsHandler(info), -// if(WebSockets.handlers.isNotEmpty()) wsAwsHandler(info) else null, - wsAwsHandler(info), - awsCloudwatch(info), - ), - scheduleAwsHandlers(info) - ).flatten() - val allSections = sections + awsLambdaHandler(info, handlerFqn, sections) - - val sectionToFile = allSections.associateWith { section -> - folder.resolve(section.name.filter { it.isLetterOrDigit() } + ".tf") - } - val warning = "# Generated via Lightning Server. This file will be overwritten or deleted when regenerating." - folder.listFiles()!!.filter { - it.extension == "tf" && it.readText().contains(warning) - }.forEach { it.delete() } - for ((section, file) in sectionToFile) { -// if(!file.readText().contains(warning)) continue - file.printWriter().use { it -> - it.appendLine(warning) - it.appendLine("##########") - it.appendLine("# Inputs") - it.appendLine("##########") - it.appendLine() - for (input in section.inputs) { - it.appendLine("variable \"${input.name}\" {") - it.appendLine(" type = ${input.type}") - input.default?.let { d -> - it.appendLine(" default = $d") - } - it.appendLine(" nullable = ${input.nullable}") - input.description?.let { d -> - it.appendLine(" description = \"$d\"") - } - input.validations.forEach { validation -> - it.appendLine(" validation {") - it.appendLine(" condition = ${validation.condition}") - it.appendLine(" error_message = ${validation.errorMessage}") - it.appendLine(" }") - } - it.appendLine("}") - } - it.appendLine() - it.appendLine("##########") - it.appendLine("# Outputs") - it.appendLine("##########") - it.appendLine() - for (output in section.outputs) { - it.appendLine("output \"${output.name}\" {") - it.appendLine(" value = ${output.value}") - it.appendLine("}") - } - it.appendLine() - it.appendLine("##########") - it.appendLine("# Resources") - it.appendLine("##########") - it.appendLine() - section.emit(it) - it.appendLine() - } - } - - val usingMongo = allSections.any { it.providers.any { it.name == "mongodbatlas" } } - if (usingMongo) { - fun get(name: String): String { - println("$name for profile ${info.profile}:") - return readln() - } - - val mongoCredsFile = File(System.getProperty("user.home")).resolve(".mongo/profiles/${info.profile}.env") - val mongoCredsFile2 = File(System.getProperty("user.home")).resolve(".mongo/profiles/${info.profile}.ps1") - mongoCredsFile.parentFile.mkdirs() - if (!mongoCredsFile.exists()) { - val mongoPublic = if (usingMongo) get("MongoDB Public Key") else null - val mongoPrivate = if (usingMongo) get("MongoDB Private Key") else null - mongoCredsFile.writeText( - """ - MONGODB_ATLAS_PUBLIC_KEY="$mongoPublic" - MONGODB_ATLAS_PRIVATE_KEY="$mongoPrivate" - """.trimIndent() + "\n" - ) - 
mongoCredsFile.setExecutable(true) - mongoCredsFile2.writeText( - """ - ${'$'}env:MONGODB_ATLAS_PUBLIC_KEY = "$mongoPublic" - ${'$'}env:MONGODB_ATLAS_PRIVATE_KEY = "$mongoPrivate" - """.trimIndent() + "\n" - ) - mongoCredsFile2.setExecutable(true) - } - } - - folder.resolve("tf").printWriter().use { - it.appendLine("#!/bin/bash") - it.appendLine("export AWS_PROFILE=${info.profile}") - if (usingMongo) { - it.appendLine( - """ - export ${'$'}(cat ~/.mongo/profiles/${info.profile}.env | xargs) - """.trimIndent() - ) - } - it.appendLine("terraform \"$@\"") - } - folder.resolve("tf").setExecutable(true) - - folder.resolve("tf.ps1").printWriter().use { - it.appendLine("\$env:AWS_PROFILE = \"${info.profile}\"") - if (usingMongo) { - it.appendLine( - """ - . ~/.mongo/profiles/${info.profile}.ps1 - """.trimIndent() - ) - } - it.appendLine("terraform \$args") - } - folder.resolve("tf.ps1").setExecutable(true) - - folder.resolve("main.tf").printWriter().use { - it.appendLine("""terraform {""") - it.appendLine(" required_providers {") - for (provider in allSections.flatMap { it.providers }.distinct()) { - it.appendLine(" ${provider.name} = {") - it.appendLine(" source = \"${provider.source}\"") - it.appendLine(" version = \"${provider.version}\"") - it.appendLine(" }") - } - it.appendLine(" }") - it.appendLine(""" backend "s3" {""") - it.appendLine(""" bucket = "${info.bucket}"""") - info.bucketPathOverride?.let { override -> - it.appendLine(""" key = "${override}"""") - } ?: run { - it.appendLine(""" key = "${info.projectNameSafe}/${folder.name}"""") - } - it.appendLine(""" region = "us-west-2"""") - it.appendLine(""" }""") - it.appendLine("""}""") - it.appendLine("""provider "aws" {""") - it.appendLine(""" region = "us-west-2"""") - it.appendLine("""}""") - it.appendLine("""provider "aws" {""") - it.appendLine(""" alias = "acm"""") - it.appendLine(""" region = "us-east-1"""") - it.appendLine("""}""") - if (usingMongo) { - it.appendLine( - """ - provider "mongodbatlas" { - } - """.trimIndent() - ) - } - } - - folder.resolve("oldstate.json").takeIf { it.exists() }?.readText()?.let { - folder.resolve("newstate.json").writeText(it - .replace("module.domain.module.Base.", "") - .replace("module.nodomain.module.Base.", "") - .replace("module.domain.module.Base", "") - .replace("module.nodomain.module.Base", "") - .replace("module.Base.", "") - .replace("module.domain.", "") - .replace("module.nodomain.", "") - .replace("module.Base", "") - .replace("module.domain", "") - .replace("module.nodomain", "") - .replace(""""module": "",""", "") - .let { - it.substringBefore("\"serial\": ") + - "\"serial\": " + - it.substringAfter("\"serial\": ").substringBefore(",").toInt().plus(1) + - "," + - it.substringAfter("\"serial\": ").substringAfter(",") - } - ) - } - - folder.resolve("terraform.tfvars").takeUnless { it.exists() }?.writeText( - allSections.flatMap { it.inputs }.distinct().joinToString("\n") { it.name + " = " + it.default } + "\n" - ) -} diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Models.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Models.kt new file mode 100644 index 000000000..be3b863bb --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Models.kt @@ -0,0 +1,46 @@ +package com.lightningkite.lightningserver.aws.terraform + +import kotlinx.serialization.Serializable + + +@Serializable +data class TerraformProjectInfo( + val projectName: String, + val bucket: String, + val 
bucketPathOverride: String? = null,
+    val core: TerraformCoreType = TerraformCoreType.Lambda,
+    val vpc: Boolean = true,
+    val existingVpc: Boolean = false,
+    val domain: Boolean = true,
+    val profile: String,
+    val createBeforeDestroy: Boolean = false,
+    val handlers: Map<String, String> = mapOf(),
+) {
+}
+
+@Serializable
+enum class TerraformCoreType {
+    Lambda, SingleEC2, ELB
+}
+
+internal val TerraformProjectInfo.privateSubnets get() = if (existingVpc) "[for s in data.aws_subnet.private : s.id]" else "module.vpc.private_subnets"
+internal val TerraformProjectInfo.subnet_cidr_blocks get() = if (existingVpc) "[for s in data.aws_subnet.private : s.cidr_block]" else "concat(module.vpc.private_subnets_cidr_blocks, module.vpc.private_subnets_cidr_blocks, [])"
+internal val TerraformProjectInfo.vpc_id get() = if (existingVpc) "data.aws_vpc.main.id" else "module.vpc.vpc_id"
+internal val TerraformProjectInfo.vpc_cidr_block get() = if (existingVpc) "data.aws_vpc.main.cidr_block" else "module.vpc.vpc_cidr_block"
+internal val TerraformProjectInfo.public_route_table_ids get() = if (existingVpc) "toset([data.aws_vpc.main.main_route_table_id])" else "module.vpc.public_route_table_ids"
+internal val TerraformProjectInfo.natGatewayIp get() = if (existingVpc) "[for s in data.aws_nat_gateway.main : s.public_ip]" else "module.vpc.nat_public_ips"
+
+internal val TerraformProjectInfo.projectNameSafe: String
+    get() = projectName.filter {
+        it.isLetterOrDigit() || it in setOf(
+            '-',
+            '_'
+        )
+    }
+internal val TerraformProjectInfo.namePrefix: String get() = projectNameSafe
+internal val TerraformProjectInfo.namePrefixLower: String get() = projectNameSafe.lowercase()
+internal val TerraformProjectInfo.namePrefixUnderscores: String get() = projectNameSafe.replace("-", "_")
+internal val TerraformProjectInfo.namePrefixSafe: String get() = projectNameSafe.filter { it.isLetterOrDigit() }
+internal val TerraformProjectInfo.namePrefixPath: String
+    get() = projectNameSafe.lowercase().replace("-", "/").replace("_", "")
+internal val TerraformProjectInfo.namePrefixPathSegment: String get() = projectNameSafe.lowercase().replace("_", "")
\ No newline at end of file
diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformHandler.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformHandler.kt
new file mode 100644
index 000000000..71fc57dab
--- /dev/null
+++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformHandler.kt
@@ -0,0 +1,42 @@
+package com.lightningkite.lightningserver.aws.terraform
+
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.serializer
+import java.util.HashMap
+
+internal data class TerraformHandler(
+    val name: String,
+    val priority: Int = 0,
+    val makeSection: TerraformProjectInfo.(settingKey: String) -> TerraformSection,
+) {
+    companion object {
+        val handlers =
+            HashMap<KSerializer<*>, HashMap<String, TerraformHandler>>()
+
+        inline fun <reified T> handler(
+            name: String = "Standard",
+            priority: Int = 0,
+            providers: List<TerraformProvider> = listOf(
+                TerraformProvider.aws,
+                TerraformProvider.random,
+                TerraformProvider.archive
+            ),
+            crossinline policies: TerraformProjectInfo.(settingKey: String) -> List<String> = { listOf() },
+            noinline inputs: TerraformProjectInfo.(settingKey: String) -> List<TerraformInput> = { listOf() },
+            noinline emit: TerraformRequirementBuildInfo.() -> Unit = { },
+            noinline settingOutput: TerraformProjectInfo.(settingKey: String) -> String,
+        ) {
+            handlers.getOrPut(serializer<T>()) { HashMap() }.put(name, TerraformHandler(name, priority) {
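+                // Registration sketch (illustrative only; the real registrations live in settingsHandlers.kt).
+                // A handler for a hypothetical FooSettings type would be registered roughly like:
+                //     TerraformHandler.handler<FooSettings>(
+                //         name = "Standard",
+                //         priority = 1,
+                //         settingOutput = { key -> "aws_foo_thing.$key.id" },
+                //     )
+                // createTerraform then picks the highest-priority handler registered for each settings
+                // type, falling back to "Direct" (a plain Terraform variable) when none is registered.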
it ->
+                TerraformSection(
+                    name = it,
+                    providers = providers,
+                    policies = policies(this, it),
+                    inputs = inputs(this, it),
+                    emit = { emit(TerraformRequirementBuildInfo(this@TerraformHandler, it, this)) },
+                    toLightningServer = mapOf(it to settingOutput(this, it)),
+                    outputs = listOf()
+                )
+            })
+        }
+    }
+}
\ No newline at end of file
diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformInput.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformInput.kt
new file mode 100644
index 000000000..e278ea2b8
--- /dev/null
+++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformInput.kt
@@ -0,0 +1,30 @@
+package com.lightningkite.lightningserver.aws.terraform
+
+internal data class TerraformInput(
+    val name: String,
+    val type: String,
+    val default: String?,
+    val nullable: Boolean = false,
+    val description: String? = null,
+    val validations: List<Validation> = emptyList(),
+) {
+    companion object {
+        fun stringList(name: String, default: List<String>?, nullable: Boolean = false, description: String? = null) =
+            TerraformInput(
+                name,
+                "list(string)",
+                default?.joinToString(", ", "[", "]") { "\"$it\"" },
+                nullable = nullable,
+                description = description,
+            )
+
+        fun string(name: String, default: String?, nullable: Boolean = false, description: String? = null) =
+            TerraformInput(name, "string", default?.let { "\"$it\"" }, nullable = nullable, description = description)
+
+        fun boolean(name: String, default: Boolean?, nullable: Boolean = false, description: String? = null) =
+            TerraformInput(name, "bool", default?.toString(), nullable = nullable, description = description)
+
+        fun number(name: String, default: Number?, nullable: Boolean = false, description: String?
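+        /*
+         * For reference, main.kt renders each TerraformInput as an HCL variable block; e.g. the
+         * "domain_name" input defined in commonAws.kt comes out roughly as:
+         *
+         *     variable "domain_name" {
+         *       type = string
+         *       nullable = false
+         *       description = "The domain the server will be hosted at."
+         *     }
+         */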
= null) = + TerraformInput(name, "number", default?.toString(), nullable = nullable, description = description) + } +} \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformOutput.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformOutput.kt new file mode 100644 index 000000000..596096334 --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformOutput.kt @@ -0,0 +1,7 @@ +package com.lightningkite.lightningserver.aws.terraform + +internal data class TerraformOutput(val name: String, val value: String, val sensitive: Boolean = false) { + companion object { + + } +} \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformProvider.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformProvider.kt new file mode 100644 index 000000000..c462e711e --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformProvider.kt @@ -0,0 +1,16 @@ +package com.lightningkite.lightningserver.aws.terraform + +internal data class TerraformProvider( + val name: String, + val source: String, + val version: String, +) { + companion object { + val aws = TerraformProvider("aws", "hashicorp/aws", "~> 4.30") + val random = TerraformProvider("random", "hashicorp/random", "~> 3.1.0") + val archive = TerraformProvider("archive", "hashicorp/archive", "~> 2.2.0") + val mongodbatlas = TerraformProvider("mongodbatlas", "mongodb/mongodbatlas", "~> 1.4") + val local = TerraformProvider("local", "hashicorp/local", "~> 2.2") + val nullProvider = TerraformProvider("null", "hashicorp/null", "~> 3.2") + } +} \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformRequirementBuildInfo.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformRequirementBuildInfo.kt new file mode 100644 index 000000000..797a697ee --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformRequirementBuildInfo.kt @@ -0,0 +1,17 @@ +package com.lightningkite.lightningserver.aws.terraform + + +internal data class TerraformRequirementBuildInfo( + val project: TerraformProjectInfo, + val name: String, + val appendable: Appendable, +) : Appendable by appendable { + val key: String get() = name +} + +internal val TerraformRequirementBuildInfo.namePrefix: String get() = project.namePrefix +internal val TerraformRequirementBuildInfo.namePrefixLower: String get() = project.namePrefixLower +internal val TerraformRequirementBuildInfo.namePrefixUnderscores: String get() = project.namePrefixUnderscores +internal val TerraformRequirementBuildInfo.namePrefixSafe: String get() = project.namePrefixSafe +internal val TerraformRequirementBuildInfo.namePrefixPath: String get() = project.namePrefixPath +internal val TerraformRequirementBuildInfo.namePrefixPathSegment: String get() = project.namePrefixPathSegment \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformSection.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformSection.kt new file mode 100644 index 000000000..2859a4426 --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/TerraformSection.kt @@ -0,0 +1,42 @@ +package 
com.lightningkite.lightningserver.aws.terraform
+
+import com.lightningkite.lightningserver.serialization.Serialization
+import com.lightningkite.lightningserver.settings.Settings
+
+internal data class TerraformSection(
+    val name: String,
+    val providers: List<TerraformProvider> = listOf(
+        TerraformProvider.aws,
+        TerraformProvider.local,
+        TerraformProvider.random,
+        TerraformProvider.nullProvider,
+        TerraformProvider.archive
+    ),
+    val policies: List<String> = listOf(),
+    val inputs: List<TerraformInput> = listOf(),
+    val emit: Appendable.() -> Unit = {},
+    val toLightningServer: Map<String, String>? = null,
+    val outputs: List<TerraformOutput> = listOf(),
+) {
+    companion object {
+
+        fun default(setting: Settings.Requirement) = TerraformSection(
+            name = setting.name,
+            inputs = listOf(
+                TerraformInput(
+                    name = setting.name,
+                    type = "any",
+                    default = setting.default.let {
+                        Serialization.Internal.json.encodeToString(
+                            setting.serializer,
+                            it
+                        )
+                    },
+                    nullable = setting.serializer.descriptor.isNullable,
+                    description = setting.description
+                ),
+            ),
+            toLightningServer = mapOf(setting.name to "var.${setting.name}")
+        )
+    }
+}
\ No newline at end of file
diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Validation.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Validation.kt
new file mode 100644
index 000000000..1aacaafb0
--- /dev/null
+++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/Validation.kt
@@ -0,0 +1,6 @@
+package com.lightningkite.lightningserver.aws.terraform
+
+internal data class Validation(
+    val condition: String,
+    val errorMessage: String,
+)
\ No newline at end of file
diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/commonAws.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/commonAws.kt
new file mode 100644
index 000000000..9c77f9574
--- /dev/null
+++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/commonAws.kt
@@ -0,0 +1,184 @@
+package com.lightningkite.lightningserver.aws.terraform
+
+import com.lightningkite.lightningserver.auth.JwtSigner
+import com.lightningkite.lightningserver.cache.CacheSettings
+import com.lightningkite.lightningserver.db.DatabaseSettings
+import com.lightningkite.lightningserver.email.EmailSettings
+import com.lightningkite.lightningserver.encryption.SecretBasis
+import com.lightningkite.lightningserver.files.FilesSettings
+import com.lightningkite.lightningserver.metrics.MetricSettings
+import com.lightningkite.lightningserver.metrics.MetricType
+import com.lightningkite.lightningserver.schedule.Schedule
+import com.lightningkite.lightningserver.schedule.Scheduler
+import com.lightningkite.lightningserver.serialization.Serialization
+import com.lightningkite.lightningserver.settings.GeneralServerSettings
+import com.lightningkite.lightningserver.settings.Settings
+import kotlinx.datetime.*
+import kotlinx.datetime.TimeZone
+import kotlinx.serialization.json.Json
+import java.io.File
+import java.util.*
+
+
+internal fun defaultAwsHandler(project: TerraformProjectInfo) = with(project) {
+    TerraformSection(
+        name = "cloud",
+        inputs = listOf(
+            TerraformInput.string(
+                "deployment_location",
+                "us-west-2",
+                description = "The AWS region key to deploy all resources in."
+            ),
+            TerraformInput.boolean(
+                "debug",
+                false,
+                description = "The GeneralSettings debug flag. When true, various development and bug-tracking conveniences are enabled at runtime. Should be false for production environments."
+ ), + TerraformInput.string("ip_prefix", "10.0"), + ) + (if (domain) listOf( + TerraformInput.string( + "domain_name_zone", + null, + description = "The AWS Hosted zone the domain will be placed under." + ), + TerraformInput.string("domain_name", null, description = "The domain the server will be hosted at.") + ) else listOf()) + (if (vpc && existingVpc) listOf( + TerraformInput.string( + "vpc_id", + null, + description = "The AWS VPC id that you want your resources to be placed under." + ), + TerraformInput.stringList("vpc_private_subnets", null), + TerraformInput.stringList("vpc_nat_gateways", null), + ) else listOf()), + emit = { + if (vpc) { + if (existingVpc) { + appendLine( + """ + data "aws_vpc" "main" { + id = var.vpc_id + } + data "aws_subnet" "private" { + for_each = toset(var.vpc_private_subnets) + id = each.value + } + data "aws_nat_gateway" "main" { + for_each = toset(var.vpc_nat_gateways) + id = each.value + } + """.trimIndent() + ) + } else { + appendLine( + """ + module "vpc" { + source = "terraform-aws-modules/vpc/aws" + version = "4.0.2" + + name = "$namePrefix" + cidr = "${'$'}{var.ip_prefix}.0.0/16" + + azs = ["${'$'}{var.deployment_location}a", "${'$'}{var.deployment_location}b", "${'$'}{var.deployment_location}c"] + private_subnets = ["${'$'}{var.ip_prefix}.1.0/24", "${'$'}{var.ip_prefix}.2.0/24", "${'$'}{var.ip_prefix}.3.0/24"] + public_subnets = ["${'$'}{var.ip_prefix}.101.0/24", "${'$'}{var.ip_prefix}.102.0/24", "${'$'}{var.ip_prefix}.103.0/24"] + + enable_nat_gateway = true + single_nat_gateway = true + enable_vpn_gateway = false + enable_dns_hostnames = false + enable_dns_support = true + } + """.trimIndent() + ) + } + appendLine( + """ + + resource "aws_vpc_endpoint" "s3" { + vpc_id = ${project.vpc_id} + service_name = "com.amazonaws.${'$'}{var.deployment_location}.s3" + route_table_ids = ${project.public_route_table_ids} + } + resource "aws_vpc_endpoint" "executeapi" { + vpc_id = ${project.vpc_id} + service_name = "com.amazonaws.${'$'}{var.deployment_location}.execute-api" + security_group_ids = [aws_security_group.executeapi.id] + vpc_endpoint_type = "Interface" + } + resource "aws_vpc_endpoint" "lambdainvoke" { + vpc_id = ${project.vpc_id} + service_name = "com.amazonaws.${'$'}{var.deployment_location}.lambda" + security_group_ids = [aws_security_group.lambdainvoke.id] + vpc_endpoint_type = "Interface" + } + + resource "aws_security_group" "internal" { + name = "$namePrefix-private" + vpc_id = ${project.vpc_id} + + ingress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ${project.subnet_cidr_blocks} + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ${project.subnet_cidr_blocks} + } + } + + resource "aws_security_group" "access_outside" { + name = "$namePrefix-access-outside" + vpc_id = ${project.vpc_id} + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + } + + resource "aws_security_group" "executeapi" { + name = "$namePrefix-execute-api" + vpc_id = ${project.vpc_id} + + ingress { + from_port = 443 + to_port = 443 + protocol = "tcp" + cidr_blocks = [${project.vpc_cidr_block}] + } + } + + resource "aws_security_group" "lambdainvoke" { + name = "$namePrefix-lambda-invoke" + vpc_id = ${project.vpc_id} + + ingress { + from_port = 443 + to_port = 443 + protocol = "tcp" + cidr_blocks = [${project.vpc_cidr_block}] + } + } + """.trimIndent() + ) + } + if (domain) { + appendLine( + """ + data "aws_route53_zone" "main" { + name = var.domain_name_zone + } + """.trimIndent() + 
)
+        }
+    },
+    )
+}
\ No newline at end of file
diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/lambda.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/lambda.kt
new file mode 100644
index 000000000..9352ea1a4
--- /dev/null
+++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/lambda.kt
@@ -0,0 +1,984 @@
+@file:Suppress("DEPRECATION")
+
+package com.lightningkite.lightningserver.aws.terraform
+
+import com.lightningkite.lightningserver.schedule.Schedule
+import com.lightningkite.lightningserver.schedule.Scheduler
+import kotlinx.datetime.*
+import kotlinx.datetime.TimeZone
+
+
+internal fun awsLambdaCloudwatch(projectInfo: TerraformProjectInfo) = with(projectInfo) {
+    TerraformSection(
+        name = "cloudwatch",
+        inputs = listOf(
+            TerraformInput(
+                name = "emergencyInvocationsPerMinuteThreshold",
+                type = "number",
+                default = "null",
+                nullable = true,
+                description = "Number of Invocations Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use emergencyInvocations, which allows defining both threshold and period)"
+            ),
+            TerraformInput(
+                name = "emergencyComputePerMinuteThreshold",
+                type = "number",
+                default = "null",
+                nullable = true,
+                description = "Milliseconds of Compute Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use emergencyCompute, which allows defining both threshold and period)"
+            ),
+            TerraformInput(
+                name = "panicInvocationsPerMinuteThreshold",
+                type = "number",
+                default = "null",
+                nullable = true,
+                description = "Number of Invocations Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use panicInvocations, which allows defining both threshold and period)"
+            ),
+            TerraformInput(
+                name = "panicComputePerMinuteThreshold",
+                type = "number",
+                default = "null",
+                nullable = true,
+                description = "Milliseconds of Compute Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use panicCompute, which allows defining both threshold and period)"
+            ),
+
+            TerraformInput(
+                name = "emergencyInvocations",
+                type = "object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number })",
+                default = "null",
+                nullable = true,
+                description = "The configurations for the Emergency Invocation alarm. Threshold is the Number of Invocations, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm.",
+                validations = listOf(
+                    Validation(
+                        condition = "(var.emergencyInvocations == null ? true : var.emergencyInvocations.evaluationPeriods > 0)",
+                        errorMessage = """"emergencyInvocations evaluationPeriods must be greater than 0"""",
+                    ),
+                    Validation(
+                        condition = "(var.emergencyInvocations == null ? true : (var.emergencyInvocations.dataPointsToAlarm <= var.emergencyInvocations.evaluationPeriods && var.emergencyInvocations.dataPointsToAlarm > 0))",
+                        errorMessage = """"emergencyInvocations dataPointsToAlarm must be greater than 0 and less than or equal to emergencyInvocations evaluationPeriods"""",
+                    )
+                )
+            ),
+            TerraformInput(
+                name = "emergencyCompute",
+                type = "object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number })",
+                default = "null",
+                nullable = true,
+                description = "The configurations for the Emergency Compute alarm. 
Threshold is the Milliseconds of Compute, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm.", + validations = listOf( + Validation( + condition = "(var.emergencyCompute == null ? true : contains([\"Sum\", \"Average\", \"Maximum\"], var.emergencyCompute.statistic))", + errorMessage = """"Allowed values for emergencyCompute statistic are: \"Sum\", \"Average\", \"Maximum\"."""" + ), + Validation( + condition = "(var.emergencyCompute == null ? true : var.emergencyCompute.evaluationPeriods > 0)", + errorMessage = """"emergencyCompute evaluationPeriods must be greater than 0"""", + ), + Validation( + condition = "(var.emergencyCompute == null ? true : (var.emergencyCompute.dataPointsToAlarm <= var.emergencyCompute.evaluationPeriods && var.emergencyCompute.dataPointsToAlarm > 0))", + errorMessage = """"emergencyCompute dataPointsToAlarm must be greater than 0 and less than or equal to emergencyCompute evaluationPeriods"""", + ) + ) + ), + TerraformInput( + name = "panicInvocations", + type = "object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number })", + default = "null", + nullable = true, + description = "The configurations for the Panic Invocations alarm. Threshold is the Number of Invocations, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm.", + validations = listOf( + Validation( + condition = "(var.panicInvocations == null ? true : var.panicInvocations.evaluationPeriods > 0)", + errorMessage = """"panicInvocations evaluationPeriods must be greater than 0"""", + ), + Validation( + condition = "(var.panicInvocations == null ? true : (var.panicInvocations.dataPointsToAlarm <= var.panicInvocations.evaluationPeriods && var.panicInvocations.dataPointsToAlarm > 0))", + errorMessage = """"panicInvocations dataPointsToAlarm must be greater than 0 and less than or equal to panicInvocations evaluationPeriods"""", + ) + ) + ), + TerraformInput( + name = "panicCompute", + type = "object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number })", + default = "null", + nullable = true, + description = "The configurations for the Panic Compute alarm. Threshold is the Milliseconds of Compute, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm.", + validations = listOf( + Validation( + condition = "(var.panicCompute == null ? true : contains([\"Sum\", \"Average\", \"Maximum\"], var.panicCompute.statistic))", + errorMessage = """"Allowed values for panicCompute statistic are: \"Sum\", \"Average\", \"Maximum\"."""" + ), + Validation( + condition = "(var.panicCompute == null ? true : var.panicCompute.evaluationPeriods > 0)", + errorMessage = """"panicCompute evaluationPeriods must be greater than 0"""", + ), + Validation( + condition = "(var.panicCompute == null ? 
true : (var.panicCompute.dataPointsToAlarm <= var.panicCompute.evaluationPeriods && var.panicCompute.dataPointsToAlarm > 0))", + errorMessage = """"panicCompute dataPointsToAlarm must be greater than 0 and less than or equal to panicCompute evaluationPeriods"""", + ) + ) + ), + + TerraformInput.string( + "emergencyContact", + null, + nullable = true, + description = "The email address that will receive emails when alarms are triggered." + ) + ), + emit = { + appendLine( + """ + locals { + anyNotifications = (var.emergencyContact != null && + (var.emergencyInvocationsPerMinuteThreshold != null || + var.emergencyComputePerMinuteThreshold != null || + var.panicInvocationsPerMinuteThreshold != null || + var.panicComputePerMinuteThreshold != null || + var.emergencyInvocations != null || + var.emergencyCompute != null || + var.panicInvocations != null || + var.panicCompute != null)) + } + resource "aws_sns_topic" "emergency" { + count = local.anyNotifications ? 1 : 0 + name = "${namePrefix}_emergencies" + } + resource "aws_sns_topic_subscription" "emergency_primary" { + count = local.anyNotifications ? 1 : 0 + topic_arn = aws_sns_topic.emergency[0].arn + protocol = "email" + endpoint = var.emergencyContact + } + resource "aws_cloudwatch_metric_alarm" "emergency_minute_invocations" { + count = local.anyNotifications && var.emergencyInvocationsPerMinuteThreshold != null ? 1 : 0 + alarm_name = "${namePrefix}_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "emergency_minute_compute" { + count = local.anyNotifications && var.emergencyComputePerMinuteThreshold != null ? 1 : 0 + alarm_name = "${namePrefix}_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "panic_minute_invocations" { + count = local.anyNotifications && var.panicInvocationsPerMinuteThreshold != null ? 1 : 0 + alarm_name = "${namePrefix}_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "panic_minute_compute" { + count = local.anyNotifications && var.panicComputePerMinuteThreshold != null ? 
1 : 0 + alarm_name = "${namePrefix}_panic_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + + + resource "aws_cloudwatch_metric_alarm" "emergency_invocations" { + count = (local.anyNotifications && + var.emergencyInvocations != null ? + 1 : 0) + alarm_name = "${namePrefix}_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyInvocations.evaluationPeriods + datapoints_to_alarm = var.emergencyInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.emergencyInvocations.period * 60 + statistic = "Sum" + threshold = var.emergencyInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "emergency_compute" { + count = (local.anyNotifications && + var.emergencyCompute != null ? + 1 : 0) + alarm_name = "${namePrefix}_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyCompute.evaluationPeriods + datapoints_to_alarm = var.emergencyCompute.dataPointsToAlarm + metric_name = "Duration" + namespace = "AWS/Lambda" + period = var.emergencyCompute.period * 60 + statistic = var.emergencyCompute.statistic + threshold = var.emergencyCompute.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "panic_invocations" { + count = (local.anyNotifications && + var.panicInvocations != null ? + 1 : 0) + alarm_name = "${namePrefix}_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.panicInvocations.evaluationPeriods + datapoints_to_alarm = var.panicInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.panicInvocations.period * 60 + statistic = "Sum" + threshold = var.panicInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] + } + resource "aws_cloudwatch_metric_alarm" "panic_compute" { + count = (local.anyNotifications && + var.panicCompute != null ? 
+                            1 : 0)
+                  alarm_name                = "${namePrefix}_panic_compute"
+                  comparison_operator       = "GreaterThanOrEqualToThreshold"
+                  evaluation_periods        = var.panicCompute.evaluationPeriods
+                  datapoints_to_alarm       = var.panicCompute.dataPointsToAlarm
+                  metric_name               = "Duration"
+                  namespace                 = "AWS/Lambda"
+                  period                    = var.panicCompute.period * 60
+                  statistic                 = var.panicCompute.statistic
+                  threshold                 = var.panicCompute.threshold
+                  alarm_description         = ""
+                  insufficient_data_actions = []
+                  dimensions = {
+                    FunctionName = aws_lambda_function.main.function_name
+                  }
+                  alarm_actions = [aws_sns_topic.emergency[0].arn]
+                }
+
+
+                resource "aws_api_gateway_account" "main" {
+                  cloudwatch_role_arn = aws_iam_role.cloudwatch.arn
+                }
+
+                resource "aws_iam_role" "cloudwatch" {
+                  name = "${namePrefixSafe}"
+
+                  assume_role_policy = <<EOF
+                {
+                  "Version": "2012-10-17",
+                  "Statement": [
+                    {
+                      "Sid": "",
+                      "Effect": "Allow",
+                      "Principal": {
+                        "Service": "apigateway.amazonaws.com"
+                      },
+                      "Action": "sts:AssumeRole"
+                    }
+                  ]
+                }
+                EOF
+                }
+
+                resource "aws_iam_role_policy_attachment" "cloudwatch" {
+                  role       = aws_iam_role.cloudwatch.name
+                  policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
+                }
+                """.trimIndent()
+            )
+        },
+    )
+}
+
+internal fun scheduleAwsHandlers(projectInfo: TerraformProjectInfo) = with(projectInfo) {
+    Scheduler.schedules.values.map {
+        val safeName = it.name.filter { it.isLetterOrDigit() }
+        when (val s = it.schedule) {
+            is Schedule.Daily -> {
+                val utcTime =
+                    LocalDateTime(LocalDate(2001, 1, 1), s.time).toInstant(s.zone).toLocalDateTime(TimeZone.UTC)
+                TerraformSection(
+                    name = "schedule_${it.name}",
+                    emit = {
+                        appendLine(
+                            """
+                            resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" {
+                              name                = "${namePrefix}_${safeName}"
+                              schedule_expression = "cron(${utcTime.minute} ${utcTime.hour} * * ? *)"
+                            }
+                            resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" {
+                              rule      = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name
+                              target_id = "lambda"
+                              arn       = aws_lambda_alias.main.arn
+                              input     = "{\"scheduled\": \"${it.name}\"}"
+                            }
+                            resource "aws_lambda_permission" "scheduled_task_${safeName}" {
+                              action        = "lambda:InvokeFunction"
+                              function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}"
+                              principal     = "events.amazonaws.com"
+                              source_arn    = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn
+                              lifecycle {
+                                create_before_destroy = $createBeforeDestroy
+                              }
+                            }
+                            """.trimIndent()
+                        )
+                    }
+                )
+            }
+
+            is Schedule.Frequency -> {
+                TerraformSection(
+                    name = "schedule_${it.name}",
+                    emit = {
+                        appendLine(
+                            """
+                            resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" {
+                              name                = "${namePrefix}_${safeName}"
+                              schedule_expression = "rate(${s.gap.inWholeMinutes} minute${if (s.gap.inWholeMinutes > 1) "s" else ""})"
+                            }
+                            resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" {
+                              rule      = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name
+                              target_id = "lambda"
+                              arn       = aws_lambda_alias.main.arn
+                              input     = "{\"scheduled\": \"${it.name}\"}"
+                            }
+                            resource "aws_lambda_permission" "scheduled_task_${safeName}" {
+                              action        = "lambda:InvokeFunction"
+                              function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}"
+                              principal     = "events.amazonaws.com"
+                              source_arn    = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn
+                              lifecycle {
+                                create_before_destroy = $createBeforeDestroy
+                              }
+                            }
+                            """.trimIndent()
+                        )
+                    }
+                )
+            }
+
+            is Schedule.Cron -> {
+                TerraformSection(
+                    name = "schedule_${it.name}",
+                    emit = {
+                        appendLine(
+                            """
+                            resource "aws_cloudwatch_event_rule" "scheduled_task_${safeName}" {
+                              name                = "${namePrefix}_${safeName}"
+                              schedule_expression = "cron(${s.cron})"
+                            }
+                            resource "aws_cloudwatch_event_target" "scheduled_task_${safeName}" {
+                              rule      = aws_cloudwatch_event_rule.scheduled_task_${safeName}.name
+                              target_id = "lambda"
+                              arn       = aws_lambda_alias.main.arn
+                              input     = "{\"scheduled\": \"${it.name}\"}"
+                            }
+                            resource "aws_lambda_permission" "scheduled_task_${safeName}" {
+                              action        = "lambda:InvokeFunction"
+                              function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}"
+                              principal     = 
"events.amazonaws.com" + source_arn = aws_cloudwatch_event_rule.scheduled_task_${safeName}.arn + lifecycle { + create_before_destroy = $createBeforeDestroy + } + } + """.trimIndent() + ) + } + ) + } + } + } +} + +internal fun awsLambdaHandler( + project: TerraformProjectInfo, + handlerFqn: String, + otherSections: List, +) = TerraformSection( + name = "lambda", + inputs = listOf( + TerraformInput.number( + "lambda_memory_size", + 1024, + description = "The amount of ram available (in Megabytes) to the virtual machine running in Lambda." + ), + TerraformInput.number( + "lambda_timeout", + 30, + description = "How long an individual lambda invocation can run before forcefully being shut down." + ), + TerraformInput.boolean( + "lambda_snapstart", + false, + description = "Whether or not lambda will deploy with SnapStart which compromises deploy time for shorter cold start time." + ), + ), + emit = { + appendLine(""" + resource "aws_s3_bucket" "lambda_bucket" { + bucket_prefix = "${project.namePrefixPathSegment}-lambda-bucket" + force_destroy = true + } + + resource "aws_iam_role" "main_exec" { + name = "${project.namePrefix}-main-exec" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [{ + Action = "sts:AssumeRole" + Effect = "Allow" + Sid = "" + Principal = { + Service = "lambda.amazonaws.com" + } + } + ] + }) + } + + resource "aws_iam_policy" "bucketDynamoAndInvoke" { + name = "${project.namePrefix}-bucketDynamoAndInvoke" + path = "/${project.namePrefixPath}/bucketDynamoAndInvoke/" + description = "Access to the ${project.namePrefix} bucket, dynamo, and invoke" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "s3:GetObject", + ] + Effect = "Allow" + Resource = [ + "${'$'}{aws_s3_bucket.lambda_bucket.arn}", + "${'$'}{aws_s3_bucket.lambda_bucket.arn}/*", + ] + }, + { + Action = [ + "dynamodb:*", + ] + Effect = "Allow" + Resource = ["*"] + }, + { + Action = [ + "lambda:InvokeFunction", + ] + Effect = "Allow" + Resource = "*" + }, + ] + }) + } + + resource "aws_iam_role_policy_attachment" "bucketDynamoAndInvoke" { + role = aws_iam_role.main_exec.name + policy_arn = aws_iam_policy.bucketDynamoAndInvoke.arn + } + resource "aws_iam_role_policy_attachment" "main_policy_exec" { + role = aws_iam_role.main_exec.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" + } + resource "aws_iam_role_policy_attachment" "main_policy_vpc" { + role = aws_iam_role.main_exec.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" + } + resource "aws_iam_role_policy_attachment" "insights_policy" { + role = aws_iam_role.main_exec.id + policy_arn = "arn:aws:iam::aws:policy/CloudWatchLambdaInsightsExecutionRolePolicy" + } + ${otherSections.flatMap { it.policies }.joinToString("\n") { + """ + resource "aws_iam_role_policy_attachment" "$it" { + role = aws_iam_role.main_exec.id + policy_arn = aws_iam_policy.$it.arn + } + """ + }} + + resource "aws_s3_object" "app_storage" { + bucket = aws_s3_bucket.lambda_bucket.id + + key = "lambda-functions.zip" + source = data.archive_file.lambda.output_path + + source_hash = data.archive_file.lambda.output_md5 + depends_on = [data.archive_file.lambda] + } + + resource "aws_lambda_function" "main" { + function_name = "${project.namePrefix}-main" + publish = var.lambda_snapstart + + s3_bucket = aws_s3_bucket.lambda_bucket.id + s3_key = aws_s3_object.app_storage.key + + runtime = "java17" + handler = "$handlerFqn" + + memory_size = 
"${'$'}{var.lambda_memory_size}" + timeout = var.lambda_timeout + # memory_size = "1024" + + source_code_hash = data.archive_file.lambda.output_base64sha256 + + role = aws_iam_role.main_exec.arn + + snap_start { + apply_on = "PublishedVersions" + } + + ${ + if (project.vpc) + """ + | vpc_config { + | subnet_ids = ${project.privateSubnets} + | security_group_ids = [aws_security_group.internal.id, aws_security_group.access_outside.id] + | } + """.trimMargin() + else + "" + } + + environment { + variables = { + LIGHTNING_SERVER_SETTINGS_DECRYPTION = random_password.settings.result + } + } + + depends_on = [aws_s3_object.app_storage] + } + + resource "aws_lambda_alias" "main" { + name = "prod" + description = "The current production version of the lambda." + function_name = aws_lambda_function.main.arn + function_version = var.lambda_snapstart ? aws_lambda_function.main.version : "${'$'}LATEST" + } + + resource "aws_cloudwatch_log_group" "main" { + name = "${project.namePrefix}-main-log" + retention_in_days = 30 + } + + resource "local_sensitive_file" "settings_raw" { + content = jsonencode({ + ${ + otherSections.mapNotNull { it.toLightningServer }.flatMap { it.entries }.map { "${it.key} = ${it.value}" } + .map { it.replace("\n", "\n ") }.joinToString("\n ") + }}) + filename = "${'$'}{path.module}/build/raw-settings.json" + } + + locals { + # Directories start with "C:..." on Windows; All other OSs use "/" for root. + is_windows = substr(pathexpand("~"), 0, 1) == "/" ? false : true + } + resource "null_resource" "lambda_jar_source" { + triggers = { + always = timestamp() + } + provisioner "local-exec" { + command = (local.is_windows ? "if(test-path \"${'$'}{path.module}/build/lambda/\") { rd -Recurse \"${'$'}{path.module}/build/lambda/\" }" : "rm -rf \"${'$'}{path.module}/build/lambda/\"") + interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] + } + provisioner "local-exec" { + command = (local.is_windows ? "cp -r -force \"${'$'}{path.module}/../../build/dist/lambda/.\" \"${'$'}{path.module}/build/lambda/\"" : "cp -rf \"${'$'}{path.module}/../../build/dist/lambda/.\" \"${'$'}{path.module}/build/lambda/\"") + interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] + } + provisioner "local-exec" { + command = "openssl enc -aes-256-cbc -md sha256 -in \"${'$'}{local_sensitive_file.settings_raw.filename}\" -out \"${'$'}{path.module}/build/lambda/settings.enc\" -pass pass:${'$'}{random_password.settings.result}" + interpreter = local.is_windows ? ["PowerShell", "-Command"] : [] + } + } + resource "null_resource" "settings_reread" { + triggers = { + settingsRawHash = local_sensitive_file.settings_raw.content + } + depends_on = [null_resource.lambda_jar_source] + provisioner "local-exec" { + command = "openssl enc -d -aes-256-cbc -md sha256 -out \"${'$'}{local_sensitive_file.settings_raw.filename}.decrypted.json\" -in \"${'$'}{path.module}/build/lambda/settings.enc\" -pass pass:${'$'}{random_password.settings.result}" + interpreter = local.is_windows ? 
["PowerShell", "-Command"] : [] + } + } + + resource "random_password" "settings" { + length = 32 + special = true + override_special = "_" + } + + data "archive_file" "lambda" { + depends_on = [null_resource.lambda_jar_source, null_resource.settings_reread] + type = "zip" + source_dir = "${'$'}{path.module}/build/lambda" + output_path = "${'$'}{path.module}/build/lambda.jar" + } + + + """.trimIndent() + ) + } +) + +internal fun httpAwsHandler(projectInfo: TerraformProjectInfo) = TerraformSection( + name = "http", + emit = { + appendLine( + """ + resource "aws_apigatewayv2_api" "http" { + name = "${projectInfo.namePrefix}-http" + protocol_type = "HTTP" + } + + resource "aws_apigatewayv2_stage" "http" { + api_id = aws_apigatewayv2_api.http.id + + name = "${projectInfo.namePrefix}-gateway-stage" + auto_deploy = true + + access_log_settings { + destination_arn = aws_cloudwatch_log_group.http_api.arn + + format = jsonencode({ + requestId = "${'$'}context.requestId" + sourceIp = "${'$'}context.identity.sourceIp" + requestTime = "${'$'}context.requestTime" + protocol = "${'$'}context.protocol" + httpMethod = "${'$'}context.httpMethod" + resourcePath = "${'$'}context.resourcePath" + routeKey = "${'$'}context.routeKey" + status = "${'$'}context.status" + responseLength = "${'$'}context.responseLength" + integrationErrorMessage = "${'$'}context.integrationErrorMessage" + } + ) + } + } + + resource "aws_apigatewayv2_integration" "http" { + api_id = aws_apigatewayv2_api.http.id + + integration_uri = aws_lambda_alias.main.invoke_arn + integration_type = "AWS_PROXY" + integration_method = "POST" + } + + resource "aws_cloudwatch_log_group" "http_api" { + name = "${projectInfo.namePrefix}-http-gateway-log" + + retention_in_days = 30 + } + + resource "aws_apigatewayv2_route" "http" { + api_id = aws_apigatewayv2_api.http.id + route_key = "${'$'}default" + target = "integrations/${'$'}{aws_apigatewayv2_integration.http.id}" + } + + resource "aws_lambda_permission" "api_gateway_http" { + action = "lambda:InvokeFunction" + function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" + principal = "apigateway.amazonaws.com" + + source_arn = "${'$'}{aws_apigatewayv2_api.http.execution_arn}/*/*" + lifecycle { + create_before_destroy = ${projectInfo.createBeforeDestroy} + } + } + """.trimIndent() + ) + if (projectInfo.domain) { + appendLine( + """ + resource "aws_acm_certificate" "http" { + domain_name = var.domain_name + validation_method = "DNS" + } + resource "aws_route53_record" "http" { + zone_id = data.aws_route53_zone.main.zone_id + name = tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_name + type = tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_type + records = [tolist(aws_acm_certificate.http.domain_validation_options)[0].resource_record_value] + ttl = "300" + } + resource "aws_acm_certificate_validation" "http" { + certificate_arn = aws_acm_certificate.http.arn + validation_record_fqdns = [aws_route53_record.http.fqdn] + } + resource aws_apigatewayv2_domain_name http { + domain_name = var.domain_name + domain_name_configuration { + certificate_arn = aws_acm_certificate.http.arn + endpoint_type = "REGIONAL" + security_policy = "TLS_1_2" + } + depends_on = [aws_acm_certificate_validation.http] + } + resource aws_apigatewayv2_api_mapping http { + stage = aws_apigatewayv2_stage.http.id + api_id = aws_apigatewayv2_stage.http.api_id + domain_name = aws_apigatewayv2_domain_name.http.domain_name + } + resource 
aws_route53_record httpAccess { + type = "A" + name = aws_apigatewayv2_domain_name.http.domain_name + zone_id = data.aws_route53_zone.main.id + alias { + evaluate_target_health = false + name = aws_apigatewayv2_domain_name.http.domain_name_configuration[0].target_domain_name + zone_id = aws_apigatewayv2_domain_name.http.domain_name_configuration[0].hosted_zone_id + } + } + """.trimIndent() + ) + } + }, + outputs = listOf( + TerraformOutput("http_url", "aws_apigatewayv2_stage.http.invoke_url"), + TerraformOutput( + "http", """ + { + id = aws_apigatewayv2_stage.http.id + api_id = aws_apigatewayv2_stage.http.api_id + invoke_url = aws_apigatewayv2_stage.http.invoke_url + arn = aws_apigatewayv2_stage.http.arn + name = aws_apigatewayv2_stage.http.name + } + """.trimIndent() + ), + ) +) + +internal fun wsAwsHandler(projectInfo: TerraformProjectInfo) = TerraformSection( + name = "websockets", + emit = { + appendLine( + """ + resource "aws_apigatewayv2_api" "ws" { + name = "${projectInfo.namePrefix}-gateway" + protocol_type = "WEBSOCKET" + route_selection_expression = "constant" + } + + resource "aws_apigatewayv2_stage" "ws" { + api_id = aws_apigatewayv2_api.ws.id + + name = "${projectInfo.namePrefix}-gateway-stage" + auto_deploy = true + + access_log_settings { + destination_arn = aws_cloudwatch_log_group.ws_api.arn + + format = jsonencode({ + requestId = "${'$'}context.requestId" + sourceIp = "${'$'}context.identity.sourceIp" + requestTime = "${'$'}context.requestTime" + protocol = "${'$'}context.protocol" + httpMethod = "${'$'}context.httpMethod" + resourcePath = "${'$'}context.resourcePath" + routeKey = "${'$'}context.routeKey" + status = "${'$'}context.status" + responseLength = "${'$'}context.responseLength" + integrationErrorMessage = "${'$'}context.integrationErrorMessage" + } + ) + } + } + + resource "aws_apigatewayv2_integration" "ws" { + api_id = aws_apigatewayv2_api.ws.id + + integration_uri = aws_lambda_alias.main.invoke_arn + integration_type = "AWS_PROXY" + integration_method = "POST" + } + + resource "aws_cloudwatch_log_group" "ws_api" { + name = "${projectInfo.namePrefix}-ws-gateway-log" + + retention_in_days = 30 + } + + resource "aws_apigatewayv2_route" "ws_connect" { + api_id = aws_apigatewayv2_api.ws.id + + route_key = "${'$'}connect" + target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" + } + resource "aws_apigatewayv2_route" "ws_default" { + api_id = aws_apigatewayv2_api.ws.id + + route_key = "${'$'}default" + target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" + } + resource "aws_apigatewayv2_route" "ws_disconnect" { + api_id = aws_apigatewayv2_api.ws.id + + route_key = "${'$'}disconnect" + target = "integrations/${'$'}{aws_apigatewayv2_integration.ws.id}" + } + + resource "aws_lambda_permission" "api_gateway_ws" { + action = "lambda:InvokeFunction" + function_name = "${'$'}{aws_lambda_alias.main.function_name}:${'$'}{aws_lambda_alias.main.name}" + principal = "apigateway.amazonaws.com" + + source_arn = "${'$'}{aws_apigatewayv2_api.ws.execution_arn}/*/*" + lifecycle { + create_before_destroy = ${projectInfo.createBeforeDestroy} + } + } + + resource "aws_iam_policy" "api_gateway_ws" { + name = "${projectInfo.namePrefix}-api_gateway_ws" + path = "/${projectInfo.namePrefixPath}/api_gateway_ws/" + description = "Access to the ${projectInfo.namePrefix}_api_gateway_ws management" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "execute-api:ManageConnections" + ] + Effect = "Allow" + Resource = "*" + }, + ] + }) + } 
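+          # Note: Resource = "*" above grants execute-api:ManageConnections on every API in the
+          # account. A tighter scope (illustrative only, not what this generator emits) would be:
+          #   Resource = "${'$'}{aws_apigatewayv2_api.ws.execution_arn}/*"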
+ resource "aws_iam_role_policy_attachment" "api_gateway_ws" { + role = aws_iam_role.main_exec.name + policy_arn = aws_iam_policy.api_gateway_ws.arn + } + """.trimIndent() + ) + if (projectInfo.domain) { + appendLine( + """ + resource "aws_acm_certificate" "ws" { + domain_name = "ws.${'$'}{var.domain_name}" + validation_method = "DNS" + } + resource "aws_route53_record" "ws" { + zone_id = data.aws_route53_zone.main.zone_id + name = tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_name + type = tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_type + records = [tolist(aws_acm_certificate.ws.domain_validation_options)[0].resource_record_value] + ttl = "300" + } + resource "aws_acm_certificate_validation" "ws" { + certificate_arn = aws_acm_certificate.ws.arn + validation_record_fqdns = [aws_route53_record.ws.fqdn] + } + resource aws_apigatewayv2_domain_name ws { + domain_name = "ws.${'$'}{var.domain_name}" + domain_name_configuration { + certificate_arn = aws_acm_certificate.ws.arn + endpoint_type = "REGIONAL" + security_policy = "TLS_1_2" + } + depends_on = [aws_acm_certificate_validation.ws] + } + resource aws_apigatewayv2_api_mapping ws { + stage = aws_apigatewayv2_stage.ws.id + api_id = aws_apigatewayv2_stage.ws.api_id + domain_name = aws_apigatewayv2_domain_name.ws.domain_name + } + resource aws_route53_record wsAccess { + type = "A" + name = aws_apigatewayv2_domain_name.ws.domain_name + zone_id = data.aws_route53_zone.main.id + alias { + evaluate_target_health = false + name = aws_apigatewayv2_domain_name.ws.domain_name_configuration[0].target_domain_name + zone_id = aws_apigatewayv2_domain_name.ws.domain_name_configuration[0].hosted_zone_id + } + } + """.trimIndent() + ) + } + }, + outputs = listOf( + TerraformOutput("ws_url", "aws_apigatewayv2_stage.ws.invoke_url"), + TerraformOutput( + "ws", """ + { + id = aws_apigatewayv2_stage.ws.id + api_id = aws_apigatewayv2_stage.ws.api_id + invoke_url = aws_apigatewayv2_stage.ws.invoke_url + arn = aws_apigatewayv2_stage.ws.arn + name = aws_apigatewayv2_stage.ws.name + } + """.trimIndent() + ), + ) +) diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/main.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/main.kt new file mode 100644 index 000000000..2a8a9b151 --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/main.kt @@ -0,0 +1,226 @@ +package com.lightningkite.lightningserver.aws.terraform + +import com.lightningkite.lightningserver.serialization.Serialization +import com.lightningkite.lightningserver.settings.Settings +import kotlinx.serialization.json.Json +import java.io.File +import java.util.* + + +fun createTerraform(handlerFqn: String, projectName: String = "project", root: File) { + root.mkdirs() + root.listFiles()!!.filter { it.isDirectory }.plus( + root.resolve("example") + ).distinct().forEach { terraformEnvironmentAws(handlerFqn, it, projectName) } +} + +private fun terraformEnvironmentAws(handlerFqn: String, folder: File, projectName: String = "project") { + SettingsHandlers + + val projectInfoFile = folder.resolve("project.json") + folder.mkdirs() + val defaultHandlers = Settings.requirements.entries.associate { + it.key to (TerraformHandler.handlers[it.value.serializer]?.maxBy { it.value.priority }?.key ?: "Direct") + } + val info = projectInfoFile + .takeIf { it.exists() } + ?.readText() + ?.let { 
Serialization.Internal.json.decodeFromString(TerraformProjectInfo.serializer(), it) } + ?.let { it.copy(handlers = defaultHandlers + it.handlers) } + ?: TerraformProjectInfo( + projectName = projectName, + bucket = "your-deployment-bucket", + vpc = false, + domain = true, + profile = "default", + handlers = defaultHandlers, + ) + @Suppress("JSON_FORMAT_REDUNDANT") + projectInfoFile.writeText( + Json(Serialization.Internal.json) { prettyPrint = true } + .encodeToString(TerraformProjectInfo.serializer(), info) + ) + + val settingsSections = Settings.requirements.values.map { + val handler = TerraformHandler.handlers[it.serializer]?.let { handlers -> + val handlerName = info.handlers.get(it.name) + if (handlerName == "Direct") return@let null + handlers[handlerName]!! + } + handler?.makeSection?.invoke(info, it.name) ?: TerraformSection.default(it) + } + val allSections = when(info.core){ + TerraformCoreType.Lambda -> { + val sections = listOf( + settingsSections, + listOf(defaultAwsHandler(info)), + listOfNotNull( + httpAwsHandler(info), + wsAwsHandler(info), + awsLambdaCloudwatch(info), + ), + scheduleAwsHandlers(info) + ).flatten() + sections + awsLambdaHandler(info, handlerFqn, sections) + } + TerraformCoreType.SingleEC2 -> { + val others = settingsSections + defaultAwsHandler(info) + others + awsEc2Handler(info, others) + } + TerraformCoreType.ELB -> TODO() + } + + val sectionToFile = allSections.associateWith { section -> + folder.resolve(section.name.filter { it.isLetterOrDigit() } + ".tf") + } + val warning = "# Generated via Lightning Server. This file will be overwritten or deleted when regenerating." + folder.listFiles()!!.filter { + it.extension == "tf" && it.readText().contains(warning) + }.forEach { it.delete() } + for ((section, file) in sectionToFile) { +// if(!file.readText().contains(warning)) continue + file.printWriter().use { it -> + it.appendLine(warning) + it.appendLine("##########") + it.appendLine("# Inputs") + it.appendLine("##########") + it.appendLine() + for (input in section.inputs) { + it.appendLine("variable \"${input.name}\" {") + it.appendLine(" type = ${input.type}") + input.default?.let { d -> + it.appendLine(" default = $d") + } + it.appendLine(" nullable = ${input.nullable}") + input.description?.let { d -> + it.appendLine(" description = \"$d\"") + } + input.validations.forEach { validation -> + it.appendLine(" validation {") + it.appendLine(" condition = ${validation.condition}") + it.appendLine(" error_message = ${validation.errorMessage}") + it.appendLine(" }") + } + it.appendLine("}") + } + it.appendLine() + it.appendLine("##########") + it.appendLine("# Outputs") + it.appendLine("##########") + it.appendLine() + for (output in section.outputs) { + it.appendLine("output \"${output.name}\" {") + it.appendLine(" value = ${output.value}") + if(output.sensitive) { + it.appendLine(" sensitive = true") + } + it.appendLine("}") + } + it.appendLine() + it.appendLine("##########") + it.appendLine("# Resources") + it.appendLine("##########") + it.appendLine() + section.emit(it) + it.appendLine() + } + } + + val usingMongo = allSections.any { it.providers.any { it.name == "mongodbatlas" } } + if (usingMongo) { + fun get(name: String): String { + println("$name for profile ${info.profile}:") + return readln() + } + + val mongoCredsFile = File(System.getProperty("user.home")).resolve(".mongo/profiles/${info.profile}.env") + val mongoCredsFile2 = File(System.getProperty("user.home")).resolve(".mongo/profiles/${info.profile}.ps1") + 
mongoCredsFile.parentFile.mkdirs() + if (!mongoCredsFile.exists()) { + val mongoPublic = if (usingMongo) get("MongoDB Public Key") else null + val mongoPrivate = if (usingMongo) get("MongoDB Private Key") else null + mongoCredsFile.writeText( + """ + MONGODB_ATLAS_PUBLIC_KEY="$mongoPublic" + MONGODB_ATLAS_PRIVATE_KEY="$mongoPrivate" + """.trimIndent() + "\n" + ) + mongoCredsFile.setExecutable(true) + mongoCredsFile2.writeText( + """ + ${'$'}env:MONGODB_ATLAS_PUBLIC_KEY = "$mongoPublic" + ${'$'}env:MONGODB_ATLAS_PRIVATE_KEY = "$mongoPrivate" + """.trimIndent() + "\n" + ) + mongoCredsFile2.setExecutable(true) + } + } + + folder.resolve("tf").printWriter().use { + it.appendLine("#!/bin/bash") + it.appendLine("export AWS_PROFILE=${info.profile}") + if (usingMongo) { + it.appendLine( + """ + export ${'$'}(cat ~/.mongo/profiles/${info.profile}.env | xargs) + """.trimIndent() + ) + } + it.appendLine("terraform \"$@\"") + } + folder.resolve("tf").setExecutable(true) + + folder.resolve("tf.ps1").printWriter().use { + it.appendLine("\$env:AWS_PROFILE = \"${info.profile}\"") + if (usingMongo) { + it.appendLine( + """ + . ~/.mongo/profiles/${info.profile}.ps1 + """.trimIndent() + ) + } + it.appendLine("terraform \$args") + } + folder.resolve("tf.ps1").setExecutable(true) + + folder.resolve("main.tf").printWriter().use { + it.appendLine("""terraform {""") + it.appendLine(" required_providers {") + for (provider in allSections.flatMap { it.providers }.distinct()) { + it.appendLine(" ${provider.name} = {") + it.appendLine(" source = \"${provider.source}\"") + it.appendLine(" version = \"${provider.version}\"") + it.appendLine(" }") + } + it.appendLine(" }") + it.appendLine(""" backend "s3" {""") + it.appendLine(""" bucket = "${info.bucket}"""") + info.bucketPathOverride?.let { override -> + it.appendLine(""" key = "${override}"""") + } ?: run { + it.appendLine(""" key = "${info.projectNameSafe}/${folder.name}"""") + } + it.appendLine(""" region = "us-west-2"""") + it.appendLine(""" }""") + it.appendLine("""}""") + it.appendLine("""provider "aws" {""") + it.appendLine(""" region = "us-west-2"""") + it.appendLine("""}""") + it.appendLine("""provider "aws" {""") + it.appendLine(""" alias = "acm"""") + it.appendLine(""" region = "us-east-1"""") + it.appendLine("""}""") + if (usingMongo) { + it.appendLine( + """ + provider "mongodbatlas" { + } + """.trimIndent() + ) + } + } + + folder.resolve("terraform.tfvars").takeUnless { it.exists() }?.writeText( + allSections.flatMap { it.inputs }.distinct().joinToString("\n") { it.name + " = " + it.default } + "\n" + ) +} diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt new file mode 100644 index 000000000..e12faa2df --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt @@ -0,0 +1,890 @@ +package com.lightningkite.lightningserver.aws.terraform + +import com.lightningkite.lightningserver.auth.JwtSigner +import com.lightningkite.lightningserver.cache.CacheSettings +import com.lightningkite.lightningserver.db.DatabaseSettings +import com.lightningkite.lightningserver.email.EmailSettings +import com.lightningkite.lightningserver.encryption.SecretBasis +import com.lightningkite.lightningserver.files.FilesSettings +import com.lightningkite.lightningserver.metrics.MetricSettings +import com.lightningkite.lightningserver.metrics.MetricType +import 
com.lightningkite.lightningserver.settings.GeneralServerSettings + + +object SettingsHandlers { + val general = TerraformHandler.handler( + inputs = { + listOf( + TerraformInput( + name = "cors", + type = "object({ allowedDomains = list(string), allowedHeaders = list(string) })", + default = "null", + nullable = true, + description = "Defines the cors rules for the server." + ), + TerraformInput.string( + "display_name", + projectName, + description = "The GeneralSettings projectName." + ) + ) + }, + settingOutput = { + """ + { + projectName = var.display_name + publicUrl = ${if (domain) "\"https://${'$'}{var.domain_name}\"" else "aws_apigatewayv2_stage.http.invoke_url"} + wsUrl = ${if (domain) "\"wss://ws.${'$'}{var.domain_name}?path=\"" else "\"\${aws_apigatewayv2_stage.ws.invoke_url}?path=\""} + debug = var.debug + cors = var.cors + } + """.trimIndent() + } + ) + val S3 = TerraformHandler.handler( + name = "S3", + inputs = { key -> + listOf( + TerraformInput.string("${key}_expiry", "P1D", nullable = true) + ) + }, + emit = { + appendLine( + """ + resource "aws_s3_bucket" "${key}" { + bucket_prefix = "${namePrefixPathSegment}-${key.lowercase()}" + force_destroy = var.debug + } + resource "aws_s3_bucket_cors_configuration" "${key}" { + bucket = aws_s3_bucket.${key}.bucket + + cors_rule { + allowed_headers = ["*"] + allowed_methods = ["PUT", "POST"] + allowed_origins = ["*"] + expose_headers = ["ETag"] + max_age_seconds = 3000 + } + + cors_rule { + allowed_headers = ["*"] + allowed_methods = ["GET", "HEAD"] + allowed_origins = ["*"] + } + } + resource "aws_s3_bucket_public_access_block" "$key" { + count = var.${key}_expiry == null ? 1 : 0 + bucket = aws_s3_bucket.$key.id + + block_public_acls = false + block_public_policy = false + ignore_public_acls = false + restrict_public_buckets = false + } + resource "aws_s3_bucket_policy" "$key" { + depends_on = [aws_s3_bucket_public_access_block.${key}] + count = var.${key}_expiry == null ? 
1 : 0
+                  bucket = aws_s3_bucket.$key.id
+                  policy = <<POLICY
+            {
+              "Version": "2012-10-17",
+              "Statement": [
+                {
+                  "Sid": "PublicReadGetObject",
+                  "Effect": "Allow",
+                  "Principal": "*",
+                  "Action": "s3:GetObject",
+                  "Resource": "arn:aws:s3:::${'$'}{aws_s3_bucket.${key}.id}/*"
+                }
+              ]
+            }
+            POLICY
+            }
+            """.trimIndent()
+            )
+        },
+        policies = { key -> listOf(key) },
+        settingOutput = { key ->
+            """
+            {
+                storageUrl = "s3://${'$'}{aws_s3_bucket.${key}.id}.s3-${'$'}{aws_s3_bucket.${key}.region}.amazonaws.com"
+                signedUrlExpiration = var.${key}_expiry
+            }
+            """.trimIndent()
+        }
+    )
+    val DocumentDB = TerraformHandler.handler(
+        name = "DocumentDB",
+        inputs = { key ->
+            listOf(TerraformInput.string("${key}_instance_class", "db.t4g.medium"))
+        },
+        emit = {
+            if (!project.vpc) throw UnsupportedOperationException("DocumentDB requires VPC")
+            appendLine(
+                """
+                resource "random_password" "${key}" {
+                  length = 32
+                  special = true
+                  override_special = "-_"
+                }
+                resource "aws_docdb_subnet_group" "${key}" {
+                  name = "$namePrefix-${key}"
+                  subnet_ids = ${project.privateSubnets}
+                }
+                resource "aws_docdb_cluster_parameter_group" "${key}" {
+                  family = "docdb4.0"
+                  name = "$namePrefix-${key}-parameter-group"
+                  parameter {
+                    name = "tls"
+                    value = "disabled"
+                  }
+                }
+                resource "aws_docdb_cluster" "${key}" {
+                  cluster_identifier = "${namePrefix}-${key}"
+                  engine = "docdb"
+                  master_username = "master"
+                  master_password = random_password.${key}.result
+                  backup_retention_period = 5
+                  preferred_backup_window = "07:00-09:00"
+                  skip_final_snapshot = true
+
+                  db_cluster_parameter_group_name = "${'$'}{aws_docdb_cluster_parameter_group.${key}.name}"
+                  vpc_security_group_ids = [aws_security_group.internal.id]
+                  db_subnet_group_name = "${'$'}{aws_docdb_subnet_group.${key}.name}"
+                }
+                resource "aws_docdb_cluster_instance" "${key}" {
+                  count = 1
+                  identifier = "$namePrefix-${key}-${'$'}{count.index}"
+                  cluster_identifier = "${'$'}{aws_docdb_cluster.${key}.id}"
+                  instance_class = var.${key}_instance_class
+                }
+                """.trimIndent()
+            )
+        },
+        settingOutput = { key ->
+            """
+            {
+                url = "mongodb://master:${'$'}{random_password.${key}.result}@${'$'}{aws_docdb_cluster_instance.${key}[0].endpoint}/?retryWrites=false"
+                databaseName = "${namePrefix}_${key}"
+            }
+            """.trimIndent()
+        }
+    )
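+    // Illustrative note: which of these database handlers is used for a given requirement is chosen
+    // per environment in that environment's project.json "handlers" map, keyed by requirement name,
+    // with "Direct" meaning no Terraform-managed resource. A minimal sketch, assuming a requirement
+    // named "database":
+    //   { "projectName": "demo", "handlers": { "database": "DocumentDB" } }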
"postgresql://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}" + } + """.trimIndent() + } + ) + val `AuroraDB Serverless V2` = TerraformHandler.handler( + name = "AuroraDB Serverless V2", + priority = 2, + inputs = { key -> + listOf( + TerraformInput.number("${key}_min_capacity", 0.5), + TerraformInput.number("${key}_max_capacity", 2), + TerraformInput.boolean("${key}_auto_pause", true) + ) + }, + emit = { + if (project.vpc) { + appendLine( + """ + resource "aws_db_subnet_group" "${key}" { + name = "$namePrefix-${key}" + subnet_ids = ${project.privateSubnets} + } + """.trimIndent() + ) + } + appendLine( + """ + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "-_" + } + resource "aws_rds_cluster" "$key" { + cluster_identifier = "$namePrefix-${key}" + engine = "aurora-postgresql" + engine_mode = "provisioned" + engine_version = "13.6" + database_name = "$namePrefixSafe${key}" + master_username = "master" + master_password = random_password.${key}.result + skip_final_snapshot = var.debug + final_snapshot_identifier = "$namePrefix-${key}" + ${if (project.vpc) """vpc_security_group_ids = [aws_security_group.internal.id]""" else ""} + ${if (project.vpc) """db_subnet_group_name = "${'$'}{aws_db_subnet_group.${key}.name}"""" else ""} + + serverlessv2_scaling_configuration { + min_capacity = var.${key}_min_capacity + max_capacity = var.${key}_max_capacity + } + } + + resource "aws_rds_cluster_instance" "$key" { + publicly_accessible = ${!project.vpc} + cluster_identifier = aws_rds_cluster.$key.id + instance_class = "db.serverless" + engine = aws_rds_cluster.$key.engine + engine_version = aws_rds_cluster.$key.engine_version + ${if (project.vpc) """db_subnet_group_name = "${'$'}{aws_db_subnet_group.${key}.name}"""" else ""} + } + """.trimIndent() + ) + }, + settingOutput = { key -> + //url = "${'$'}{var.${key}_auto_pause ? 
"auroradb-autopause" : "postgresql"}://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}" + """ + { + url = "postgresql://master:${'$'}{random_password.${key}.result}@${'$'}{aws_rds_cluster.database.endpoint}/$namePrefixSafe${key}" + } + """.trimIndent() + } + ) + val `MongoDB Serverless` = TerraformHandler.handler( + name = "MongoDB Serverless", + priority = 0, + providers = listOf(TerraformProvider.mongodbatlas), + inputs = { key -> + listOf( + TerraformInput.string("${key}_org_id", null), + TerraformInput.boolean("${key}_continuous_backup", false), +// TerraformInput.string("${key}_team_id", null) + ) + }, + emit = { + appendLine( + """ + resource "mongodbatlas_project" "$key" { + name = "$namePrefixSafe$key" + org_id = var.${key}_org_id + + is_collect_database_specifics_statistics_enabled = true + is_data_explorer_enabled = true + is_performance_advisor_enabled = true + is_realtime_performance_panel_enabled = true + is_schema_advisor_enabled = true + } + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "-_" + } + resource "mongodbatlas_serverless_instance" "$key" { + project_id = mongodbatlas_project.$key.id + name = "$namePrefixSafe$key" + + provider_settings_backing_provider_name = "AWS" + provider_settings_provider_name = "SERVERLESS" + provider_settings_region_name = replace(upper(var.deployment_location), "-", "_") + + continuous_backup_enabled = var.${key}_continuous_backup + } + resource "mongodbatlas_database_user" "$key" { + username = "$namePrefixSafe$key-main" + password = random_password.$key.result + project_id = mongodbatlas_project.$key.id + auth_database_name = "admin" + + roles { + role_name = "readWrite" + database_name = "default" + } + + roles { + role_name = "readAnyDatabase" + database_name = "admin" + } + + } + """.trimIndent() + ) + if (project.vpc) { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + for_each = toset(${project.natGatewayIp}) + project_id = mongodbatlas_project.$key.id + cidr_block = "${'$'}{each.value}/32" + comment = "NAT Gateway" + } + """.trimIndent() + ) + } else { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + project_id = mongodbatlas_project.$key.id + cidr_block = "0.0.0.0/0" + comment = "Anywhere" + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_serverless_instance.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" + } + """.trimIndent() + } + ) + val `MongoDB Dedicated` = TerraformHandler.handler( + name = "MongoDB Dedicated", + priority = 0, + providers = listOf(TerraformProvider.mongodbatlas), + inputs = { key -> + listOf( + TerraformInput.string("${key}_org_id", null), + TerraformInput.string("${key}_min_size", "M10"), + TerraformInput.string("${key}_max_size", "M40") + ) + }, + emit = { + appendLine( + """ + resource "mongodbatlas_project" "$key" { + name = "$namePrefixSafe$key" + org_id = var.${key}_org_id + + is_collect_database_specifics_statistics_enabled = true + is_data_explorer_enabled = true + is_performance_advisor_enabled = true + is_realtime_performance_panel_enabled = true + is_schema_advisor_enabled = true + } + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "-_" + } + resource "mongodbatlas_advanced_cluster" "database" { + project_id = 
mongodbatlas_project.database.id + name = "$namePrefixSafe$key" + cluster_type = "REPLICASET" + # lifecycle { ignore_changes = [instance_size] } + replication_specs { + region_configs { + auto_scaling { + compute_enabled = true + compute_min_instance_size = "M10" + compute_max_instance_size = var.${key}_max_size + compute_scale_down_enabled = true + disk_gb_enabled = true + } + electable_specs { + instance_size = var.${key}_min_size + node_count = 3 + } + analytics_specs { + instance_size = var.${key}_min_size + node_count = 1 + } + priority = 7 + provider_name = "AWS" + region_name = replace(upper(var.deployment_location), "-", "_") + } + } + } + resource "mongodbatlas_database_user" "$key" { + username = "$namePrefixSafe$key-main" + password = random_password.$key.result + project_id = mongodbatlas_project.$key.id + auth_database_name = "admin" + + roles { + role_name = "readWrite" + database_name = "default" + } + + roles { + role_name = "readAnyDatabase" + database_name = "admin" + } + + } + """.trimIndent() + ) + if (project.vpc) { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + for_each = toset(${project.natGatewayIp}) + project_id = mongodbatlas_project.$key.id + cidr_block = "${'$'}{each.value}/32" + comment = "NAT Gateway" + } + """.trimIndent() + ) + } else { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + project_id = mongodbatlas_project.$key.id + cidr_block = "0.0.0.0/0" + comment = "Anywhere" + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_advanced_cluster.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" + } + """.trimIndent() + } + ) + val ElastiCache = TerraformHandler.handler( + name = "ElastiCache", + inputs = { key -> + listOf( + TerraformInput.string("${key}_node_type", "cache.t2.micro"), + TerraformInput.number("${key}_node_count", 1) + ) + }, + emit = { + if (!project.vpc) throw IllegalArgumentException("A VPC is required for ElastiCache for security purposes.") + appendLine( + """ + resource "aws_elasticache_cluster" "${key}" { + cluster_id = "${namePrefix}-${key}" + engine = "memcached" + node_type = var.${key}_node_type + num_cache_nodes = var.${key}_node_count + parameter_group_name = "default.memcached1.6" + port = 11211 + security_group_ids = [aws_security_group.internal.id] + subnet_group_name = aws_elasticache_subnet_group.${key}.name + } + resource "aws_elasticache_subnet_group" "${key}" { + name = "$namePrefix-${key}" + subnet_ids = ${project.privateSubnets} + } + """.trimIndent() + ) + }, + settingOutput = { key -> + """ + { + url = "memcached-aws://${'$'}{aws_elasticache_cluster.${key}.cluster_address}:11211" + } + """.trimIndent() + } + ) + val DynamoDB = TerraformHandler.handler( + name = "DynamoDB", + priority = 1, + settingOutput = { _ -> + """ + { + url = "dynamodb://${'$'}{var.deployment_location}/${namePrefixUnderscores}" + } + """.trimIndent() + } + ) + val jwtSigner = TerraformHandler.handler( + inputs = { key -> + listOf( + TerraformInput.string("${key}_expiration", "PT8760H"), + TerraformInput.string("${key}_emailExpiration", "PT1H"), + ) + }, + emit = { + appendLine( + """ + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "!#${'$'}%&*()-_=+[]{}<>:?" 
+ } + """.trimIndent() + ) + }, + settingOutput = { key -> + """ + { + expiration = var.${key}_expiration + emailExpiration = var.${key}_emailExpiration + secret = random_password.${key}.result + } + """.trimIndent() + } + ) + val secretBasis = TerraformHandler.handler( + inputs = { _ -> + listOf( + ) + }, + emit = { + appendLine( + """ + resource "random_password" "${key}" { + length = 88 + special = true + override_special = "+/" + } + """.trimIndent() + ) + }, + settingOutput = { key -> + """ + random_password.${key}.result + """.trimIndent() + } + ) + val Cloudwatch = TerraformHandler.handler( + name = "Cloudwatch", + inputs = { key -> + listOf( + TerraformInput.stringList("${key}_tracked", MetricType.known.map { it.name }), + TerraformInput.string("${key}_namespace", this.projectName), + ) + }, + emit = { + appendLine( + """ + resource "aws_iam_policy" "${key}" { + name = "${namePrefix}-${key}" + path = "/${namePrefixPath}/${key}/" + description = "Access to publish metrics" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "cloudwatch:PutMetricData", + ] + Effect = "Allow" + Condition = { + StringEquals = { + "cloudwatch:namespace": var.${key}_namespace + } + } + Resource = ["*"] + }, + ] + }) + } + """.trimIndent() + ) + }, + policies = { key -> listOf(key) }, + settingOutput = { key -> + """ + { + url = "cloudwatch://${'$'}{var.deployment_location}/${'$'}{var.${key}_namespace}" + trackingByEntryPoint = var.${key}_tracked + } + """.trimIndent() + } + ) + val `SMTP through SES` = TerraformHandler.handler( + name = "SMTP through SES", + inputs = { key -> + if (domain) { + listOf( + TerraformInput.string("reporting_email", null) + ) + } else { + listOf( + TerraformInput.string("${key}_sender", null) + ) + } + }, + emit = { + appendLine( + """ + resource "aws_iam_user" "${key}" { + name = "${namePrefix}-${key}-user" + } + + resource "aws_iam_access_key" "${key}" { + user = aws_iam_user.${key}.name + } + + data "aws_iam_policy_document" "${key}" { + statement { + actions = ["ses:SendRawEmail"] + resources = ["*"] + } + } + + resource "aws_iam_policy" "${key}" { + name = "${namePrefix}-${key}-policy" + description = "Allows sending of e-mails via Simple Email Service" + policy = data.aws_iam_policy_document.${key}.json + } + + resource "aws_iam_user_policy_attachment" "${key}" { + user = aws_iam_user.${key}.name + policy_arn = aws_iam_policy.${key}.arn + } + + """.trimIndent() + ) + + if (project.vpc) { + appendLine( + """ + resource "aws_security_group" "${key}" { + name = "${namePrefix}-${key}" + vpc_id = ${project.vpc_id} + + ingress { + from_port = 587 + to_port = 587 + protocol = "tcp" + cidr_blocks = [${project.vpc_cidr_block}] + } + } + resource "aws_vpc_endpoint" "${key}" { + vpc_id = ${project.vpc_id} + service_name = "com.amazonaws.${'$'}{var.deployment_location}.email-smtp" + security_group_ids = [aws_security_group.${key}.id] + vpc_endpoint_type = "Interface" + } + """.trimIndent() + ) + } + + if (project.domain) { + appendLine( + """ + resource "aws_ses_domain_identity" "${key}" { + domain = var.domain_name + } + resource "aws_ses_domain_mail_from" "$key" { + domain = aws_ses_domain_identity.$key.domain + mail_from_domain = "mail.${'$'}{var.domain_name}" + } + resource "aws_route53_record" "${key}_mx" { + zone_id = data.aws_route53_zone.main.zone_id + name = aws_ses_domain_mail_from.$key.mail_from_domain + type = "MX" + ttl = "600" + records = ["10 feedback-smtp.${'$'}{var.deployment_location}.amazonses.com"] # Change to the region in which 
`aws_ses_domain_identity.example` is created + } + resource "aws_route53_record" "${key}" { + zone_id = data.aws_route53_zone.main.zone_id + name = "_amazonses.${'$'}{var.domain_name}" + type = "TXT" + ttl = "600" + records = [aws_ses_domain_identity.${key}.verification_token] + } + resource "aws_ses_domain_dkim" "${key}_dkim" { + domain = aws_ses_domain_identity.${key}.domain + } + resource "aws_route53_record" "${key}_spf_mail_from" { + zone_id = data.aws_route53_zone.main.zone_id + name = aws_ses_domain_mail_from.$key.mail_from_domain + type = "TXT" + ttl = "300" + records = [ + "v=spf1 include:amazonses.com -all" + ] + } + resource "aws_route53_record" "${key}_spf_domain" { + zone_id = data.aws_route53_zone.main.zone_id + name = aws_ses_domain_identity.$key.domain + type = "TXT" + ttl = "300" + records = [ + "v=spf1 include:amazonses.com -all" + ] + } + resource "aws_route53_record" "${key}_dkim_records" { + count = 3 + zone_id = data.aws_route53_zone.main.zone_id + name = "${'$'}{element(aws_ses_domain_dkim.${key}_dkim.dkim_tokens, count.index)}._domainkey.${'$'}{var.domain_name}" + type = "CNAME" + ttl = "300" + records = [ + "${'$'}{element(aws_ses_domain_dkim.${key}_dkim.dkim_tokens, count.index)}.dkim.amazonses.com", + ] + } + resource "aws_route53_record" "${key}_route_53_dmarc_txt" { + zone_id = data.aws_route53_zone.main.zone_id + name = "_dmarc.${'$'}{var.domain_name}" + type = "TXT" + ttl = "300" + records = [ + "v=DMARC1;p=quarantine;pct=75;rua=mailto:${'$'}{var.reporting_email}" + ] + } + """.trimIndent() + ) + } else { + appendLine( + """ + resource "aws_ses_email_identity" "${key}" { + email = var.${key}_sender + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = "smtp://${'$'}{aws_iam_access_key.${key}.id}:${'$'}{aws_iam_access_key.${key}.ses_smtp_password_v4}@email-smtp.${'$'}{var.deployment_location}.amazonaws.com:587" + fromEmail = ${if (domain) "\"noreply@${'$'}{var.domain_name}\"" else "var.${key}_sender"} + } + """.trimIndent() + } + ) + val `SMTP through SES with Existing Identity` = TerraformHandler.handler( + name = "SMTP through SES with Existing Identity", + inputs = { key -> + listOf( + TerraformInput.string("${key}_sender", null) + ) + }, + emit = { + appendLine( + """ + resource "aws_iam_user" "${key}" { + name = "${namePrefix}-${key}-user" + } + + resource "aws_iam_access_key" "${key}" { + user = aws_iam_user.${key}.name + } + + data "aws_iam_policy_document" "${key}" { + statement { + actions = ["ses:SendRawEmail"] + resources = ["*"] + } + } + + resource "aws_iam_policy" "${key}" { + name = "${namePrefix}-${key}-policy" + description = "Allows sending of e-mails via Simple Email Service" + policy = data.aws_iam_policy_document.${key}.json + } + + resource "aws_iam_user_policy_attachment" "${key}" { + user = aws_iam_user.${key}.name + policy_arn = aws_iam_policy.${key}.arn + } + + """.trimIndent() + ) + + if (project.vpc) { + appendLine( + """ + resource "aws_security_group" "${key}" { + name = "${namePrefix}-${key}" + vpc_id = ${project.vpc_id} + + ingress { + from_port = 587 + to_port = 587 + protocol = "tcp" + cidr_blocks = [${project.vpc_cidr_block}] + } + } + resource "aws_vpc_endpoint" "${key}" { + vpc_id = ${project.vpc_id} + service_name = "com.amazonaws.${'$'}{var.deployment_location}.email-smtp" + security_group_ids = [aws_security_group.${key}.id] + vpc_endpoint_type = "Interface" + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = 
"smtp://${'$'}{aws_iam_access_key.${key}.id}:${'$'}{aws_iam_access_key.${key}.ses_smtp_password_v4}@email-smtp.${'$'}{var.deployment_location}.amazonaws.com:587" + fromEmail = var.${key}_sender + } + """.trimIndent() + } + ) +} \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/singleec2.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/singleec2.kt new file mode 100644 index 000000000..99d5513c9 --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/singleec2.kt @@ -0,0 +1,217 @@ +package com.lightningkite.lightningserver.aws.terraform + + + +internal fun awsEc2Handler( + project: TerraformProjectInfo, + otherSections: List, +) = TerraformSection( + name = "ec2", + providers = listOf( + TerraformProvider( + name = "tls", + source = "hashicorp/tls", + version = "~>4.0.6" + ), + TerraformProvider( + name = "ssh", + source = "loafoe/tls", + version = "~>2.7.0" + ), + ), + inputs = listOf( + TerraformInput.string( + "instance_ubuntu_version", + "24.04", + description = "The ubuntu LTS version to use" + ), + TerraformInput.string( + "instance_size", + "t3.micro", + description = "The instance size to use; defaults to t2.micro" + ), + TerraformInput.string( + "admin_ip", + "0.0.0.0/32", + description = "Permits SSH from this address" + ), + TerraformInput( + "admins", + "list(object({ username=string, name=string, site=string, phone1=string, phone2=string, email=string, keys=list(string) }))", + description = "Keys for administrative access", + default = null, + ) + ), + emit = { + //language=HIL + // + appendLine(""" + + resource "local_sensitive_file" "settings_raw" { + content = jsonencode({ + ${ + otherSections.mapNotNull { it.toLightningServer }.flatMap { it.entries }.map { "${it.key} = ${it.value}" } + .map { it.replace("\n", "\n ") }.joinToString("\n ") + }}) + filename = "${'$'}{path.module}/build/raw-settings.json" + } + + resource "aws_iam_role" "main_exec" { + name = "${project.namePrefix}-main-exec" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [{ + Action = "sts:AssumeRole" + Principal = { + Service = "ec2.amazonaws.com" + } + Effect = "Allow" + Sid = "" + }] + }) + } + ${otherSections.flatMap { it.policies }.joinToString("\n") { + """ + resource "aws_iam_role_policy_attachment" "$it" { + role = aws_iam_role.main_exec.id + policy_arn = aws_iam_policy.$it.arn + } + """ + }} + resource "aws_iam_instance_profile" "main_exec" { + name = "main_exec" + role = aws_iam_role.main_exec.name + } + data "aws_ami" "ubuntu" { + most_recent = true + + filter { + name = "name" + values = ["ubuntu/images/hvm-ssd-gp3/*24.04*"] + } + + filter { + name = "virtualization-type" + values = ["hvm"] + } + + filter { + name = "description" + values = ["*LTS*"] + } + + filter { + name = "architecture" + values = ["x86_64"] + } + + owners = ["099720109477"] # Canonical + } + resource "tls_private_key" "main" { + algorithm = "RSA" + rsa_bits = 4096 + } + resource "aws_key_pair" "main" { + key_name = "${project.namePrefix}-terraform-deploy-key" + public_key = tls_private_key.main.public_key_openssh + } + resource "aws_security_group" "main" { + name = "${project.namePrefix}-main" + description = "The rules for the server" + ${ + when { + project.existingVpc -> "vpc_id = data.aws_vpc.main.id" + project.vpc -> "vpc_id = aws_vpc.main.id" + else -> "" + } + } + + tags = { + Name = "${project.namePrefix}-main" + } + } + resource 
"aws_vpc_security_group_ingress_rule" "main_allow_http" { + security_group_id = aws_security_group.main.id + cidr_ipv4 = "0.0.0.0/0" + from_port = 80 + ip_protocol = "tcp" + to_port = 80 + } + resource "aws_vpc_security_group_ingress_rule" "main_allow_https" { + security_group_id = aws_security_group.main.id + cidr_ipv4 = "0.0.0.0/0" + from_port = 443 + ip_protocol = "tcp" + to_port = 443 + } + resource "aws_vpc_security_group_ingress_rule" "allow_tls_ipv4" { + security_group_id = aws_security_group.main.id + cidr_ipv4 = var.admin_ip + from_port = 22 + ip_protocol = "tcp" + to_port = 22 + } + resource "aws_vpc_security_group_egress_rule" "allow_tls_ipv4" { + security_group_id = aws_security_group.main.id + cidr_ipv4 = "0.0.0.0/0" + ip_protocol = "-1" + } + + locals { + userCode = join("\n", [for x in var.admins : < /dev/null + sudo apt upgrade -y + sudo apt install -y angie + EOF + } + resource "aws_eip" "main" { + instance = aws_instance.main.id + ${if(project.vpc) "domain = \"vpc\"" else ""} + } + resource "aws_route53_record" "main" { + zone_id = data.aws_route53_zone.main.zone_id + name = var.domain_name + type = "A" + records = [aws_eip.main.public_ip] + ttl = "300" + } + """.trimIndent(), + ) + }, + outputs = listOf( + TerraformOutput("private_key", "tls_private_key.main.private_key_pem", sensitive = true) + ) +) \ No newline at end of file diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraformAws.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraformAws.kt new file mode 100644 index 000000000..4784a942b --- /dev/null +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraformAws.kt @@ -0,0 +1,7 @@ +package com.lightningkite.lightningserver.aws + +import java.io.File + +@Deprecated("Use the direct one instead", ReplaceWith("com.lightningkite.lightningserver.aws.terraform.createTerraform")) +fun terraformAws(handlerFqn: String, projectName: String = "project", root: File) + = com.lightningkite.lightningserver.aws.terraform.createTerraform(handlerFqn, projectName, root) \ No newline at end of file diff --git a/server-core/src/test/kotlin/com/lightningkite/lightningserver/serialization/CSV2Test.kt b/server-core/src/test/kotlin/com/lightningkite/lightningserver/serialization/CSV2Test.kt index ff2b7a2a7..d3d890983 100644 --- a/server-core/src/test/kotlin/com/lightningkite/lightningserver/serialization/CSV2Test.kt +++ b/server-core/src/test/kotlin/com/lightningkite/lightningserver/serialization/CSV2Test.kt @@ -147,6 +147,26 @@ auctionDay,auction,venue,company,auctionDate,city,state,country,wasMigrated,lotN ) ).decodeSerializableValue(serializer) ) + assertEquals( + basis, + StringDeferringDecoder( + config = stringDeferringConfig, + descriptor = serializer.descriptor, + map = mapOf( + "x" to "1", + "y" to "fdsa", + "z" to "notnull", + "a" to "false", + "a.x" to "42", + "a.z" to "null", + "a.a.x" to "-1", + "a.a.a" to "false", + "b.0" to "1", + "c.0" to "key", + "c.1" to "1" + ) + ).decodeSerializableValue(serializer).also { println(it) } + ) assertEquals( basis, StringDeferringDecoder( diff --git a/server-mongo/src/test/kotlin/com/lightningkite/lightningdb/SearchTextConditionTest.kt b/server-mongo/src/test/kotlin/com/lightningkite/lightningdb/SearchTextConditionTest.kt index eedc14f0c..8e6bfff43 100644 --- a/server-mongo/src/test/kotlin/com/lightningkite/lightningdb/SearchTextConditionTest.kt +++ b/server-mongo/src/test/kotlin/com/lightningkite/lightningdb/SearchTextConditionTest.kt @@ -116,6 +116,16 @@ class 
SearchTextConditionTest { .toList() .let { assertEquals(1, it.size, "Got $it") } + collection + .find(condition { it.fullTextSearch("india hotel", requireAllTermsPresent = true) }) + .toList() + .let { assertEquals(1, it.size, "Got $it") } + + collection + .find(condition { it.fullTextSearch("hotel", requireAllTermsPresent = true) }) + .toList() + .let { assertEquals(2, it.size, "Got $it") } + // var query = "One" // var condition = path().fullTextSearch(query, requireAllTermsPresent = true) // var results = collection.find(condition).toList() From 39a43debb9d3c55bcd1f96fbb21e610f0426cdf5 Mon Sep 17 00:00:00 2001 From: Joseph Ivie Date: Sun, 20 Oct 2024 15:08:32 -0600 Subject: [PATCH 2/4] middle of work --- demo/src/main/kotlin/Server.kt | 4 + .../aws/terraform/settingsHandlers.kt | 177 ++++++++++++++++++ 2 files changed, 181 insertions(+) diff --git a/demo/src/main/kotlin/Server.kt b/demo/src/main/kotlin/Server.kt index 8b7ecf341..5964ee3c3 100644 --- a/demo/src/main/kotlin/Server.kt +++ b/demo/src/main/kotlin/Server.kt @@ -129,6 +129,10 @@ object Server : ServerPathGroup(ServerPath.root) { HttpResponse.plainText("Hello ${it.user()}") } + val die2 = path("oom").get.handler { + throw OutOfMemoryError("Fake") + } + val socket = path("socket").websocket( connect = { println("Connected $it - you are ${it.user()}") }, message = { diff --git a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt index e12faa2df..c646cb621 100644 --- a/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt +++ b/server-aws/src/main/kotlin/com/lightningkite/lightningserver/aws/terraform/settingsHandlers.kt @@ -517,6 +517,183 @@ object SettingsHandlers { """.trimIndent() } ) + val `MongoDB Serverless on Existing Project` = TerraformHandler.handler( + name = "MongoDB Serverless on Existing Project", + priority = 0, + providers = listOf(TerraformProvider.mongodbatlas), + inputs = { key -> + listOf( + TerraformInput.string("${key}_org_id", null), + TerraformInput.boolean("${key}_continuous_backup", false), + TerraformInput.string("${key}_project_id", null) + ) + }, + emit = { + appendLine( + """ + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "-_" + } + resource "mongodbatlas_serverless_instance" "$key" { + project_id = var.${key}_project_id + name = "$namePrefixSafe$key" + + provider_settings_backing_provider_name = "AWS" + provider_settings_provider_name = "SERVERLESS" + provider_settings_region_name = replace(upper(var.deployment_location), "-", "_") + + continuous_backup_enabled = var.${key}_continuous_backup + } + resource "mongodbatlas_database_user" "$key" { + username = "$namePrefixSafe$key-main" + password = random_password.$key.result + project_id = var.${key}_project_id + auth_database_name = "admin" + + roles { + role_name = "readWrite" + database_name = "default" + } + + roles { + role_name = "readAnyDatabase" + database_name = "admin" + } + + } + """.trimIndent() + ) + if (project.vpc) { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + for_each = toset(${project.natGatewayIp}) + project_id = var.${key}_project_id + cidr_block = "${'$'}{each.value}/32" + comment = "NAT Gateway" + } + """.trimIndent() + ) + } else { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + project_id = var.${key}_project_id + cidr_block = "0.0.0.0/0" 
+ comment = "Anywhere" + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_serverless_instance.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" + } + """.trimIndent() + } + ) + val `MongoDB Dedicated on Existing Project` = TerraformHandler.handler( + name = "MongoDB Dedicated on Existing Project", + priority = 0, + providers = listOf(TerraformProvider.mongodbatlas), + inputs = { key -> + listOf( + TerraformInput.string("${key}_org_id", null), + TerraformInput.string("${key}_min_size", "M10"), + TerraformInput.string("${key}_max_size", "M40"), + TerraformInput.string("${key}_project_id", null) + ) + }, + emit = { + appendLine( + """ + resource "random_password" "${key}" { + length = 32 + special = true + override_special = "-_" + } + resource "mongodbatlas_advanced_cluster" "database" { + project_id = var.${key}_project_id + name = "$namePrefixSafe$key" + cluster_type = "REPLICASET" + # lifecycle { ignore_changes = [instance_size] } + replication_specs { + region_configs { + auto_scaling { + compute_enabled = true + compute_min_instance_size = "M10" + compute_max_instance_size = var.${key}_max_size + compute_scale_down_enabled = true + disk_gb_enabled = true + } + electable_specs { + instance_size = var.${key}_min_size + node_count = 3 + } + analytics_specs { + instance_size = var.${key}_min_size + node_count = 1 + } + priority = 7 + provider_name = "AWS" + region_name = replace(upper(var.deployment_location), "-", "_") + } + } + } + resource "mongodbatlas_database_user" "$key" { + username = "$namePrefixSafe$key-main" + password = random_password.$key.result + project_id = var.${key}_project_id + auth_database_name = "admin" + + roles { + role_name = "readWrite" + database_name = "default" + } + + roles { + role_name = "readAnyDatabase" + database_name = "admin" + } + + } + """.trimIndent() + ) + if (project.vpc) { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + for_each = toset(${project.natGatewayIp}) + project_id = var.${key}_project_id + cidr_block = "${'$'}{each.value}/32" + comment = "NAT Gateway" + } + """.trimIndent() + ) + } else { + appendLine( + """ + resource "mongodbatlas_project_ip_access_list" "$key" { + project_id = var.${key}_project_id + cidr_block = "0.0.0.0/0" + comment = "Anywhere" + } + """.trimIndent() + ) + } + }, + settingOutput = { key -> + """ + { + url = "mongodb+srv://$namePrefixSafe$key-main:${'$'}{random_password.${key}.result}@${'$'}{replace(mongodbatlas_advanced_cluster.$key.connection_strings_standard_srv, "mongodb+srv://", "")}/default?retryWrites=true&w=majority" + } + """.trimIndent() + } + ) val ElastiCache = TerraformHandler.handler( name = "ElastiCache", inputs = { key -> From c3a641033d73880b779e3751b43b2753d1f9d8c9 Mon Sep 17 00:00:00 2001 From: Joseph Ivie Date: Wed, 23 Oct 2024 16:46:47 -0600 Subject: [PATCH 3/4] New terraform generator --- demo/terraform/example/.terraform.lock.hcl | 126 +++++++ demo/terraform/example/cloudwatch.tf | 330 ++++++++++++++++++ demo/terraform/example/lambda.tf | 10 + ...socketDatabaseChangeSubscriptionCleanup.tf | 9 - .../example/schedulecleanRedirectToFiles.tf | 9 - .../example/schedulecleanupUploads.tf | 9 - .../example/scheduleclearOldMetrics.tf | 9 - .../scheduletestmodelrestcleanDumps.tf | 26 ++ .../terraform/example/scheduletestschedule.tf | 9 - .../example/scheduletestschedule2.tf | 9 - 
demo/terraform/example/secretBasis.tf | 21 ++ demo/terraform/example/sms.tf | 21 ++ .../lightningserver/aws/terraform/lambda.kt | 37 +- 13 files changed, 544 insertions(+), 81 deletions(-) create mode 100644 demo/terraform/example/.terraform.lock.hcl create mode 100644 demo/terraform/example/cloudwatch.tf create mode 100644 demo/terraform/example/scheduletestmodelrestcleanDumps.tf create mode 100644 demo/terraform/example/secretBasis.tf create mode 100644 demo/terraform/example/sms.tf diff --git a/demo/terraform/example/.terraform.lock.hcl b/demo/terraform/example/.terraform.lock.hcl new file mode 100644 index 000000000..e937b921d --- /dev/null +++ b/demo/terraform/example/.terraform.lock.hcl @@ -0,0 +1,126 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/archive" { + version = "2.2.0" + constraints = "~> 2.2.0" + hashes = [ + "h1:62mVchC1L6vOo5QS9uUf52uu0emsMM+LsPQJ1BEaTms=", + "zh:06bd875932288f235c16e2237142b493c2c2b6aba0e82e8c85068332a8d2a29e", + "zh:0c681b481372afcaefddacc7ccdf1d3bb3a0c0d4678a526bc8b02d0c331479bc", + "zh:100fc5b3fc01ea463533d7bbfb01cb7113947a969a4ec12e27f5b2be49884d6c", + "zh:55c0d7ddddbd0a46d57c51fcfa9b91f14eed081a45101dbfc7fd9d2278aa1403", + "zh:73a5dd68379119167934c48afa1101b09abad2deb436cd5c446733e705869d6b", + "zh:841fc4ac6dc3479981330974d44ad2341deada8a5ff9e3b1b4510702dfbdbed9", + "zh:91be62c9b41edb137f7f835491183628d484e9d6efa82fcb75cfa538c92791c5", + "zh:acd5f442bd88d67eb948b18dc2ed421c6c3faee62d3a12200e442bfff0aa7d8b", + "zh:ad5720da5524641ad718a565694821be5f61f68f1c3c5d2cfa24426b8e774bef", + "zh:e63f12ea938520b3f83634fc29da28d92eed5cfbc5cc8ca08281a6a9c36cca65", + "zh:f6542918faa115df46474a36aabb4c3899650bea036b5f8a5e296be6f8f25767", + ] +} + +provider "registry.terraform.io/hashicorp/aws" { + version = "4.67.0" + constraints = "~> 4.30" + hashes = [ + "h1:5Zfo3GfRSWBaXs4TGQNOflr1XaYj6pRnVJLX5VAjFX4=", + "zh:0843017ecc24385f2b45f2c5fce79dc25b258e50d516877b3affee3bef34f060", + "zh:19876066cfa60de91834ec569a6448dab8c2518b8a71b5ca870b2444febddac6", + "zh:24995686b2ad88c1ffaa242e36eee791fc6070e6144f418048c4ce24d0ba5183", + "zh:4a002990b9f4d6d225d82cb2fb8805789ffef791999ee5d9cb1fef579aeff8f1", + "zh:559a2b5ace06b878c6de3ecf19b94fbae3512562f7a51e930674b16c2f606e29", + "zh:6a07da13b86b9753b95d4d8218f6dae874cf34699bca1470d6effbb4dee7f4b7", + "zh:768b3bfd126c3b77dc975c7c0e5db3207e4f9997cf41aa3385c63206242ba043", + "zh:7be5177e698d4b547083cc738b977742d70ed68487ce6f49ecd0c94dbf9d1362", + "zh:8b562a818915fb0d85959257095251a05c76f3467caa3ba95c583ba5fe043f9b", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9c385d03a958b54e2afd5279cd8c7cbdd2d6ca5c7d6a333e61092331f38af7cf", + "zh:b3ca45f2821a89af417787df8289cb4314b273d29555ad3b2a5ab98bb4816b3b", + "zh:da3c317f1db2469615ab40aa6baba63b5643bae7110ff855277a1fb9d8eb4f2c", + "zh:dc6430622a8dc5cdab359a8704aec81d3825ea1d305bbb3bbd032b1c6adfae0c", + "zh:fac0d2ddeadf9ec53da87922f666e1e73a603a611c57bcbc4b86ac2821619b1d", + ] +} + +provider "registry.terraform.io/hashicorp/local" { + version = "2.5.2" + constraints = "~> 2.2" + hashes = [ + "h1:IyFbOIO6mhikFNL/2h1iZJ6kyN3U00jgkpCLUCThAfE=", + "zh:136299545178ce281c56f36965bf91c35407c11897f7082b3b983d86cb79b511", + "zh:3b4486858aa9cb8163378722b642c57c529b6c64bfbfc9461d940a84cd66ebea", + "zh:4855ee628ead847741aa4f4fc9bed50cfdbf197f2912775dd9fe7bc43fa077c0", + "zh:4b8cd2583d1edcac4011caafe8afb7a95e8110a607a1d5fb87d921178074a69b", + 
"zh:52084ddaff8c8cd3f9e7bcb7ce4dc1eab00602912c96da43c29b4762dc376038", + "zh:71562d330d3f92d79b2952ffdda0dad167e952e46200c767dd30c6af8d7c0ed3", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:805f81ade06ff68fa8b908d31892eaed5c180ae031c77ad35f82cb7a74b97cf4", + "zh:8b6b3ebeaaa8e38dd04e56996abe80db9be6f4c1df75ac3cccc77642899bd464", + "zh:ad07750576b99248037b897de71113cc19b1a8d0bc235eb99173cc83d0de3b1b", + "zh:b9f1c3bfadb74068f5c205292badb0661e17ac05eb23bfe8bd809691e4583d0e", + "zh:cc4cbcd67414fefb111c1bf7ab0bc4beb8c0b553d01719ad17de9a047adff4d1", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.3" + constraints = "~> 3.2" + hashes = [ + "h1:I0Um8UkrMUb81Fxq/dxbr3HLP2cecTH2WMJiwKSrwQY=", + "zh:22d062e5278d872fe7aed834f5577ba0a5afe34a3bdac2b81f828d8d3e6706d2", + "zh:23dead00493ad863729495dc212fd6c29b8293e707b055ce5ba21ee453ce552d", + "zh:28299accf21763ca1ca144d8f660688d7c2ad0b105b7202554ca60b02a3856d3", + "zh:55c9e8a9ac25a7652df8c51a8a9a422bd67d784061b1de2dc9fe6c3cb4e77f2f", + "zh:756586535d11698a216291c06b9ed8a5cc6a4ec43eee1ee09ecd5c6a9e297ac1", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:9d5eea62fdb587eeb96a8c4d782459f4e6b73baeece4d04b4a40e44faaee9301", + "zh:a6355f596a3fb8fc85c2fb054ab14e722991533f87f928e7169a486462c74670", + "zh:b5a65a789cff4ada58a5baffc76cb9767dc26ec6b45c00d2ec8b1b027f6db4ed", + "zh:db5ab669cf11d0e9f81dc380a6fdfcac437aea3d69109c7aef1a5426639d2d65", + "zh:de655d251c470197bcbb5ac45d289595295acb8f829f6c781d4a75c8c8b7c7dd", + "zh:f5c68199f2e6076bce92a12230434782bf768103a427e9bb9abee99b116af7b5", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.1.3" + constraints = "~> 3.1.0" + hashes = [ + "h1:LPSVX+oXKGaZmxgtaPf2USxoEsWK/pnhmm/5FKw+PtU=", + "zh:26e07aa32e403303fc212a4367b4d67188ac965c37a9812e07acee1470687a73", + "zh:27386f48e9c9d849fbb5a8828d461fde35e71f6b6c9fc235bc4ae8403eb9c92d", + "zh:5f4edda4c94240297bbd9b83618fd362348cadf6bf24ea65ea0e1844d7ccedc0", + "zh:646313a907126cd5e69f6a9fafe816e9154fccdc04541e06fed02bb3a8fa2d2e", + "zh:7349692932a5d462f8dee1500ab60401594dddb94e9aa6bf6c4c0bd53e91bbb8", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:9034daba8d9b32b35930d168f363af04cecb153d5849a7e4a5966c97c5dc956e", + "zh:bb81dfca59ef5f949ef39f19ea4f4de25479907abc28cdaa36d12ecd7c0a9699", + "zh:bcf7806b99b4c248439ae02c8e21f77aff9fadbc019ce619b929eef09d1221bb", + "zh:d708e14d169e61f326535dd08eecd3811cd4942555a6f8efabc37dbff9c6fc61", + "zh:dc294e19a46e1cefb9e557a7b789c8dd8f319beca99b8c265181bc633dc434cc", + "zh:f9d758ee53c55dc016dd736427b6b0c3c8eb4d0dbbc785b6a3579b0ffedd9e42", + ] +} + +provider "registry.terraform.io/mongodb/mongodbatlas" { + version = "1.21.2" + constraints = "~> 1.4" + hashes = [ + "h1:9DjT1Mw8OD39Llt/bV4SYxYBx1JZFg/SmfV5bG6+jNs=", + "zh:072ddb37277f48fca1a3799915d65b0ef6c4851d0424118de82a2119a7d8c267", + "zh:108f8e7659e59aa47a044b4d2106e3335b3d0d275c37aeebf1e8ad3de9750119", + "zh:2ed426a31db8f3d98bd3c230836168ae95d7bf9ccd53896f9ac41609e305fa47", + "zh:4b1b3ffe1127ee283e461a5a8c51f29b17f04e13ce425b4fba3a079a60d7ef59", + "zh:52efc34cdd4d1997adc717ff4199556bc64ebb9a5edd6280c8b825947e6cd523", + "zh:54a6e0d73c524dc9c111411024b8fecce38a5716fc1a83b9f4ba25e64d036c55", + "zh:67651f591886f4834f78535b00beb37c1c4c1f3d8591547c69e3d35d84e65a72", + "zh:8b5325b2fea45d72450c763f450b845f014b8d4d76bd5b8bebc99dbbdcb63f03", + "zh:d64f7bec0f18963bd548b847b3322d3510cfc6dfdffd38382d4a5f1fc20b14de", + 
"zh:e3daf97ec1607c2e8dd73bbb490177037ad5d5b82647ce7f25e944be451b21ac", + "zh:e5d7183de85761298225e8a7c9c6098b82de8f408f48aa1a4735b87b2ccf0e54", + "zh:e78a907e41095433d6742e5878527bcc3d469ed3765d2dd309d29d479091f8eb", + "zh:e7ccb0ec2a9739e108ed4ad205eed5f46734d5629ca524508e87a39390521dff", + "zh:fc46fea9a56c62ceac5179cac9145e292e301c0f4ce0448c89d59c58b3cb785a", + ] +} diff --git a/demo/terraform/example/cloudwatch.tf b/demo/terraform/example/cloudwatch.tf new file mode 100644 index 000000000..f4f6e89d3 --- /dev/null +++ b/demo/terraform/example/cloudwatch.tf @@ -0,0 +1,330 @@ +# Generated via Lightning Server. This file will be overwritten or deleted when regenerating. +########## +# Inputs +########## + +variable "emergencyInvocationsPerMinuteThreshold" { + type = number + default = null + nullable = true + description = "Number of Invocations Per Minute, Assign null to not create this alarm. (DEPRECATED!! Use emergencyInvocations which allows defined both threshold and period)" +} +variable "emergencyComputePerMinuteThreshold" { + type = number + default = null + nullable = true + description = "Milliseconds of Compute Per Minute, Assign null to not create this alarm. (DEPRECATED!! Use emergencyCompute which allows defined both threshold and period)" +} +variable "panicInvocationsPerMinuteThreshold" { + type = number + default = null + nullable = true + description = "Number of Invocations Per Minute, Assign null to not create this alarm. (DEPRECATED!! Use panicInvocations which allows defined both threshold and period)" +} +variable "panicComputePerMinuteThreshold" { + type = number + default = null + nullable = true + description = "Milliseconds of Compute Per Minute, Assign null to not create this alarm. (DEPRECATED!! Use panicCompute which allows defined both threshold and period)" +} +variable "emergencyInvocations" { + type = object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number }) + default = null + nullable = true + description = "The configurations for the Emergency Invocation alarm. Threshold is the Number of Invocations, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.emergencyInvocations == null ? true : var.emergencyInvocations.evaluationPeriods > 0) + error_message = "emergencyInvocations evaluationPeriods must be greater than 0" + } + validation { + condition = (var.emergencyInvocations == null ? true : (var.emergencyInvocations.dataPointsToAlarm <= var.emergencyInvocations.evaluationPeriods && var.emergencyInvocations.dataPointsToAlarm > 0)) + error_message = "emergencyInvocations dataPointsToAlarm must be greater than 0 and less than or equal to emergencyInvocations evaluationPeriods" + } +} +variable "emergencyCompute" { + type = object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number }) + default = null + nullable = true + description = "The configurations for the Emergency Compute alarm. Threshold is the Milliseconds of Compute, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.emergencyCompute == null ? 
+variable "panicInvocations" {
+  type = object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number })
+  default = null
+  nullable = true
+  description = "The configuration for the Panic Invocations alarm. Threshold is the number of invocations, Period is the timeframe in minutes, and DataPointsToAlarm is how many of the EvaluationPeriods must breach before the alarm triggers. Assign null to not create this alarm."
+  validation {
+    condition = (var.panicInvocations == null ? true : var.panicInvocations.evaluationPeriods > 0)
+    error_message = "panicInvocations evaluationPeriods must be greater than 0"
+  }
+  validation {
+    condition = (var.panicInvocations == null ? true : (var.panicInvocations.dataPointsToAlarm <= var.panicInvocations.evaluationPeriods && var.panicInvocations.dataPointsToAlarm > 0))
+    error_message = "panicInvocations dataPointsToAlarm must be greater than 0 and less than or equal to panicInvocations evaluationPeriods"
+  }
+}
+variable "panicCompute" {
+  type = object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number })
+  default = null
+  nullable = true
+  description = "The configuration for the Panic Compute alarm. Threshold is the milliseconds of compute, Period is the timeframe in minutes, and DataPointsToAlarm is how many of the EvaluationPeriods must breach before the alarm triggers. Assign null to not create this alarm."
+  validation {
+    condition = (var.panicCompute == null ? true : contains(["Sum", "Average", "Maximum"], var.panicCompute.statistic))
+    error_message = "Allowed values for panicCompute statistic are: \"Sum\", \"Average\", \"Maximum\"."
+  }
+  validation {
+    condition = (var.panicCompute == null ? true : var.panicCompute.evaluationPeriods > 0)
+    error_message = "panicCompute evaluationPeriods must be greater than 0"
+  }
+  validation {
+    condition = (var.panicCompute == null ? true : (var.panicCompute.dataPointsToAlarm <= var.panicCompute.evaluationPeriods && var.panicCompute.dataPointsToAlarm > 0))
+    error_message = "panicCompute dataPointsToAlarm must be greater than 0 and less than or equal to panicCompute evaluationPeriods"
+  }
+}
+variable "emergencyContact" {
+  type = string
+  nullable = true
+  description = "The email address that will receive emails when alarms are triggered."
+} + +########## +# Outputs +########## + + +########## +# Resources +########## + +locals { + anyNotifications = (var.emergencyContact != null && + (var.emergencyInvocationsPerMinuteThreshold != null || + var.emergencyComputePerMinuteThreshold != null || + var.panicInvocationsPerMinuteThreshold != null || + var.panicComputePerMinuteThreshold != null || + var.emergencyInvocations != null || + var.emergencyCompute != null || + var.panicInvocations != null || + var.panicCompute != null)) +} +resource "aws_sns_topic" "emergency" { + count = local.anyNotifications ? 1 : 0 + name = "demo-example_emergencies" +} +resource "aws_sns_topic_subscription" "emergency_primary" { + count = local.anyNotifications ? 1 : 0 + topic_arn = aws_sns_topic.emergency[0].arn + protocol = "email" + endpoint = var.emergencyContact +} +resource "aws_cloudwatch_metric_alarm" "emergency_minute_invocations" { + count = local.anyNotifications && var.emergencyInvocationsPerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo-example_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "emergency_minute_compute" { + count = local.anyNotifications && var.emergencyComputePerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo-example_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_minute_invocations" { + count = local.anyNotifications && var.panicInvocationsPerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo-example_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_minute_compute" { + count = local.anyNotifications && var.panicComputePerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo-example_panic_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} + + +resource "aws_cloudwatch_metric_alarm" "emergency_invocations" { + count = (local.anyNotifications && + var.emergencyInvocations != null ? 
+ 1 : 0) + alarm_name = "demo-example_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyInvocations.evaluationPeriods + datapoints_to_alarm = var.emergencyInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.emergencyInvocations.period * 60 + statistic = "Sum" + threshold = var.emergencyInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "emergency_compute" { + count = (local.anyNotifications && + var.emergencyCompute != null ? + 1 : 0) + alarm_name = "demo-example_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyCompute.evaluationPeriods + datapoints_to_alarm = var.emergencyCompute.dataPointsToAlarm + metric_name = "Duration" + namespace = "AWS/Lambda" + period = var.emergencyCompute.period * 60 + statistic = var.emergencyCompute.statistic + threshold = var.emergencyCompute.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_invocations" { + count = (local.anyNotifications && + var.panicInvocations != null ? + 1 : 0) + alarm_name = "demo-example_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.panicInvocations.evaluationPeriods + datapoints_to_alarm = var.panicInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.panicInvocations.period * 60 + statistic = "Sum" + threshold = var.panicInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_compute" { + count = (local.anyNotifications && + var.panicCompute != null ? 
+ 1 : 0) + alarm_name = "demo-example_panic_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.panicCompute.evaluationPeriods + datapoints_to_alarm = var.panicCompute.dataPointsToAlarm + metric_name = "Duration" + namespace = "AWS/Lambda" + period = var.panicCompute.period * 60 + statistic = var.panicCompute.statistic + threshold = var.panicCompute.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} + + +resource "aws_api_gateway_account" "main" { + cloudwatch_role_arn = aws_iam_role.cloudwatch.arn +} + +resource "aws_iam_role" "cloudwatch" { + name = "demoexample" + + assume_role_policy = < Date: Thu, 24 Oct 2024 12:14:02 -0600 Subject: [PATCH 4/4] New terraform generator in progress --- demo/terraform/lkec2/cache.tf | 16 + demo/terraform/lkec2/cloud.tf | 46 +++ demo/terraform/lkec2/cloudwatch.tf | 330 ++++++++++++++++++ demo/terraform/lkec2/database.tf | 63 ++++ demo/terraform/lkec2/email.tf | 107 ++++++ demo/terraform/lkec2/exceptions.tf | 21 ++ demo/terraform/lkec2/files.tf | 98 ++++++ demo/terraform/lkec2/general.tf | 28 ++ demo/terraform/lkec2/http.tf | 127 +++++++ demo/terraform/lkec2/jwt.tf | 31 ++ demo/terraform/lkec2/lambda.tf | 270 ++++++++++++++ demo/terraform/lkec2/logging.tf | 21 ++ demo/terraform/lkec2/main.tf | 36 ++ demo/terraform/lkec2/metrics.tf | 48 +++ demo/terraform/lkec2/project.json | 24 ++ ...socketDatabaseChangeSubscriptionCleanup.tf | 26 ++ .../lkec2/schedulecleanRedirectToFiles.tf | 26 ++ .../terraform/lkec2/schedulecleanupUploads.tf | 26 ++ .../lkec2/scheduleclearOldMetrics.tf | 26 ++ .../lkec2/scheduletestmodelrestcleanDumps.tf | 26 ++ demo/terraform/lkec2/scheduletestschedule.tf | 26 ++ demo/terraform/lkec2/scheduletestschedule2.tf | 26 ++ demo/terraform/lkec2/secretBasis.tf | 21 ++ demo/terraform/lkec2/sms.tf | 21 ++ demo/terraform/lkec2/terraform.tfvars | 31 ++ demo/terraform/lkec2/tf | 3 + demo/terraform/lkec2/tf.ps1 | 2 + demo/terraform/lkec2/websockets.tf | 163 +++++++++ 28 files changed, 1689 insertions(+) create mode 100644 demo/terraform/lkec2/cache.tf create mode 100644 demo/terraform/lkec2/cloud.tf create mode 100644 demo/terraform/lkec2/cloudwatch.tf create mode 100644 demo/terraform/lkec2/database.tf create mode 100644 demo/terraform/lkec2/email.tf create mode 100644 demo/terraform/lkec2/exceptions.tf create mode 100644 demo/terraform/lkec2/files.tf create mode 100644 demo/terraform/lkec2/general.tf create mode 100644 demo/terraform/lkec2/http.tf create mode 100644 demo/terraform/lkec2/jwt.tf create mode 100644 demo/terraform/lkec2/lambda.tf create mode 100644 demo/terraform/lkec2/logging.tf create mode 100644 demo/terraform/lkec2/main.tf create mode 100644 demo/terraform/lkec2/metrics.tf create mode 100644 demo/terraform/lkec2/project.json create mode 100644 demo/terraform/lkec2/scheduleWebsocketDatabaseChangeSubscriptionCleanup.tf create mode 100644 demo/terraform/lkec2/schedulecleanRedirectToFiles.tf create mode 100644 demo/terraform/lkec2/schedulecleanupUploads.tf create mode 100644 demo/terraform/lkec2/scheduleclearOldMetrics.tf create mode 100644 demo/terraform/lkec2/scheduletestmodelrestcleanDumps.tf create mode 100644 demo/terraform/lkec2/scheduletestschedule.tf create mode 100644 demo/terraform/lkec2/scheduletestschedule2.tf create mode 100644 demo/terraform/lkec2/secretBasis.tf create mode 100644 demo/terraform/lkec2/sms.tf create mode 100644 
demo/terraform/lkec2/terraform.tfvars
 create mode 100755 demo/terraform/lkec2/tf
 create mode 100755 demo/terraform/lkec2/tf.ps1
 create mode 100644 demo/terraform/lkec2/websockets.tf

diff --git a/demo/terraform/lkec2/cache.tf b/demo/terraform/lkec2/cache.tf
new file mode 100644
index 000000000..2b06feb0d
--- /dev/null
+++ b/demo/terraform/lkec2/cache.tf
@@ -0,0 +1,16 @@
+# Generated via Lightning Server. This file will be overwritten or deleted when regenerating.
+##########
+# Inputs
+##########
+
+
+##########
+# Outputs
+##########
+
+
+##########
+# Resources
+##########
+
+
diff --git a/demo/terraform/lkec2/cloud.tf b/demo/terraform/lkec2/cloud.tf
new file mode 100644
index 000000000..2483f452b
--- /dev/null
+++ b/demo/terraform/lkec2/cloud.tf
@@ -0,0 +1,46 @@
+# Generated via Lightning Server. This file will be overwritten or deleted when regenerating.
+##########
+# Inputs
+##########
+
+variable "deployment_location" {
+  type = string
+  default = "us-west-2"
+  nullable = false
+  description = "The AWS region key to deploy all resources in."
+}
+variable "debug" {
+  type = bool
+  default = false
+  nullable = false
+  description = "The GeneralSettings debug flag. When true, enables various runtime behaviors for easier development and bug tracking. Should be false for production environments."
+}
+variable "ip_prefix" {
+  type = string
+  default = "10.0"
+  nullable = false
+}
+variable "domain_name_zone" {
+  type = string
+  nullable = false
+  description = "The AWS Hosted zone the domain will be placed under."
+}
+variable "domain_name" {
+  type = string
+  nullable = false
+  description = "The domain the server will be hosted at."
+}
+
+##########
+# Outputs
+##########
+
+
+##########
+# Resources
+##########
+
+data "aws_route53_zone" "main" {
+  name = var.domain_name_zone
+}
+
diff --git a/demo/terraform/lkec2/cloudwatch.tf b/demo/terraform/lkec2/cloudwatch.tf
new file mode 100644
index 000000000..0c3bcbbeb
--- /dev/null
+++ b/demo/terraform/lkec2/cloudwatch.tf
@@ -0,0 +1,330 @@
+# Generated via Lightning Server. This file will be overwritten or deleted when regenerating.
+##########
+# Inputs
+##########
+
+variable "emergencyInvocationsPerMinuteThreshold" {
+  type = number
+  default = null
+  nullable = true
+  description = "Number of Invocations Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use emergencyInvocations, which allows defining both threshold and period.)"
+}
+variable "emergencyComputePerMinuteThreshold" {
+  type = number
+  default = null
+  nullable = true
+  description = "Milliseconds of Compute Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use emergencyCompute, which allows defining both threshold and period.)"
+}
+variable "panicInvocationsPerMinuteThreshold" {
+  type = number
+  default = null
+  nullable = true
+  description = "Number of Invocations Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use panicInvocations, which allows defining both threshold and period.)"
+}
+variable "panicComputePerMinuteThreshold" {
+  type = number
+  default = null
+  nullable = true
+  description = "Milliseconds of Compute Per Minute. Assign null to not create this alarm. (DEPRECATED!! Use panicCompute, which allows defining both threshold and period.)"
+}
+variable "emergencyInvocations" {
+  type = object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number })
+  default = null
+  nullable = true
+  description = "The configurations for the Emergency Invocation alarm.
Threshold is the Number of Invocations, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.emergencyInvocations == null ? true : var.emergencyInvocations.evaluationPeriods > 0) + error_message = "emergencyInvocations evaluationPeriods must be greater than 0" + } + validation { + condition = (var.emergencyInvocations == null ? true : (var.emergencyInvocations.dataPointsToAlarm <= var.emergencyInvocations.evaluationPeriods && var.emergencyInvocations.dataPointsToAlarm > 0)) + error_message = "emergencyInvocations dataPointsToAlarm must be greater than 0 and less than or equal to emergencyInvocations evaluationPeriods" + } +} +variable "emergencyCompute" { + type = object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number }) + default = null + nullable = true + description = "The configurations for the Emergency Compute alarm. Threshold is the Milliseconds of Compute, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.emergencyCompute == null ? true : contains(["Sum", "Average", "Maximum"], var.emergencyCompute.statistic)) + error_message = "Allowed values for emergencyCompute statistic are: \"Sum\", \"Average\", \"Maximum\"." + } + validation { + condition = (var.emergencyCompute == null ? true : var.emergencyCompute.evaluationPeriods > 0) + error_message = "emergencyCompute evaluationPeriods must be greater than 0" + } + validation { + condition = (var.emergencyCompute == null ? true : (var.emergencyCompute.dataPointsToAlarm <= var.emergencyCompute.evaluationPeriods && var.emergencyCompute.dataPointsToAlarm > 0)) + error_message = "emergencyCompute dataPointsToAlarm must be greater than 0 and less than or equal to emergencyCompute evaluationPeriods" + } +} +variable "panicInvocations" { + type = object({ threshold = number, period = number, evaluationPeriods = number, dataPointsToAlarm = number }) + default = null + nullable = true + description = "The configurations for the Panic Invocations alarm. Threshold is the Number of Invocations, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.panicInvocations == null ? true : var.panicInvocations.evaluationPeriods > 0) + error_message = "panicInvocations evaluationPeriods must be greater than 0" + } + validation { + condition = (var.panicInvocations == null ? true : (var.panicInvocations.dataPointsToAlarm <= var.panicInvocations.evaluationPeriods && var.panicInvocations.dataPointsToAlarm > 0)) + error_message = "panicInvocations dataPointsToAlarm must be greater than 0 and less than or equal to panicInvocations evaluationPeriods" + } +} +variable "panicCompute" { + type = object({ threshold = number, period = number, statistic = string, evaluationPeriods = number, dataPointsToAlarm = number }) + default = null + nullable = true + description = "The configurations for the Panic Compute alarm. 
Threshold is the Milliseconds of Compute, Period is the timeframe in Minutes, and DataPointsToAlarm are how many periods need to breach in the number of EvaluationPeriods before an alarm is triggered. Assign null to not create this alarm." + validation { + condition = (var.panicCompute == null ? true : contains(["Sum", "Average", "Maximum"], var.panicCompute.statistic)) + error_message = "Allowed values for panicCompute statistic are: \"Sum\", \"Average\", \"Maximum\"." + } + validation { + condition = (var.panicCompute == null ? true : var.panicCompute.evaluationPeriods > 0) + error_message = "panicCompute evaluationPeriods must be greater than 0" + } + validation { + condition = (var.panicCompute == null ? true : (var.panicCompute.dataPointsToAlarm <= var.panicCompute.evaluationPeriods && var.panicCompute.dataPointsToAlarm > 0)) + error_message = "panicCompute dataPointsToAlarm must be greater than 0 and less than or equal to panicCompute evaluationPeriods" + } +} +variable "emergencyContact" { + type = string + nullable = true + description = "The email address that will receive emails when alarms are triggered." +} + +########## +# Outputs +########## + + +########## +# Resources +########## + +locals { + anyNotifications = (var.emergencyContact != null && + (var.emergencyInvocationsPerMinuteThreshold != null || + var.emergencyComputePerMinuteThreshold != null || + var.panicInvocationsPerMinuteThreshold != null || + var.panicComputePerMinuteThreshold != null || + var.emergencyInvocations != null || + var.emergencyCompute != null || + var.panicInvocations != null || + var.panicCompute != null)) +} +resource "aws_sns_topic" "emergency" { + count = local.anyNotifications ? 1 : 0 + name = "demo_emergencies" +} +resource "aws_sns_topic_subscription" "emergency_primary" { + count = local.anyNotifications ? 1 : 0 + topic_arn = aws_sns_topic.emergency[0].arn + protocol = "email" + endpoint = var.emergencyContact +} +resource "aws_cloudwatch_metric_alarm" "emergency_minute_invocations" { + count = local.anyNotifications && var.emergencyInvocationsPerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "emergency_minute_compute" { + count = local.anyNotifications && var.emergencyComputePerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.emergencyComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_minute_invocations" { + count = local.anyNotifications && var.panicInvocationsPerMinuteThreshold != null ? 
1 : 0 + alarm_name = "demo_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicInvocationsPerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_minute_compute" { + count = local.anyNotifications && var.panicComputePerMinuteThreshold != null ? 1 : 0 + alarm_name = "demo_panic_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = "1" + metric_name = "Duration" + namespace = "AWS/Lambda" + period = "60" + statistic = "Sum" + threshold = var.panicComputePerMinuteThreshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} + + +resource "aws_cloudwatch_metric_alarm" "emergency_invocations" { + count = (local.anyNotifications && + var.emergencyInvocations != null ? + 1 : 0) + alarm_name = "demo_emergency_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyInvocations.evaluationPeriods + datapoints_to_alarm = var.emergencyInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.emergencyInvocations.period * 60 + statistic = "Sum" + threshold = var.emergencyInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "emergency_compute" { + count = (local.anyNotifications && + var.emergencyCompute != null ? + 1 : 0) + alarm_name = "demo_emergency_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.emergencyCompute.evaluationPeriods + datapoints_to_alarm = var.emergencyCompute.dataPointsToAlarm + metric_name = "Duration" + namespace = "AWS/Lambda" + period = var.emergencyCompute.period * 60 + statistic = var.emergencyCompute.statistic + threshold = var.emergencyCompute.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_invocations" { + count = (local.anyNotifications && + var.panicInvocations != null ? + 1 : 0) + alarm_name = "demo_panic_invocations" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.panicInvocations.evaluationPeriods + datapoints_to_alarm = var.panicInvocations.dataPointsToAlarm + metric_name = "Invocations" + namespace = "AWS/Lambda" + period = var.panicInvocations.period * 60 + statistic = "Sum" + threshold = var.panicInvocations.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} +resource "aws_cloudwatch_metric_alarm" "panic_compute" { + count = (local.anyNotifications && + var.panicCompute != null ? 
+ 1 : 0) + alarm_name = "demo_panic_compute" + comparison_operator = "GreaterThanOrEqualToThreshold" + evaluation_periods = var.panicCompute.evaluationPeriods + datapoints_to_alarm = var.panicCompute.dataPointsToAlarm + metric_name = "Duration" + namespace = "AWS/Lambda" + period = var.panicCompute.period * 60 + statistic = var.panicCompute.statistic + threshold = var.panicCompute.threshold + alarm_description = "" + insufficient_data_actions = [] + dimensions = { + FunctionName = aws_lambda_function.main.function_name + } + alarm_actions = [aws_sns_topic.emergency[0].arn] +} + + +resource "aws_api_gateway_account" "main" { + cloudwatch_role_arn = aws_iam_role.cloudwatch.arn +} + +resource "aws_iam_role" "cloudwatch" { + name = "demo" + + assume_role_policy = <