Skip to content

Commit

Permalink
give examples and re-format
Browse files Browse the repository at this point in the history
  • Loading branch information
JDBraun committed Nov 21, 2024
1 parent 89a6fbe commit a636761
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 38 deletions.
Original file line number Diff line number Diff line change
@@ -1,37 +1,37 @@
// Terraform Documentation: https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster

// Cluster Version: resolves the most recent Databricks Runtime version that is
// under long-term support, so the cluster below always pins to a stable LTS release.
data "databricks_spark_version" "latest_lts" {
  long_term_support = true
}

// Cluster Creation: a shared classic-compute-plane cluster running the LTS
// runtime resolved above. Auto-terminates after 10 idle minutes to limit cost.
resource "databricks_cluster" "example" {
  cluster_name            = "Shared Classic Compute Plane Cluster"
  data_security_mode      = "USER_ISOLATION" // shared-cluster mode isolating users from each other
  spark_version           = data.databricks_spark_version.latest_lts.id
  node_type_id            = "i3.xlarge"
  autotermination_minutes = 10

  // Scale between 1 and 2 workers based on load.
  autoscale {
    min_workers = 1
    max_workers = 2
  }

  // Derby Metastore configs: run an embedded, in-memory Derby-backed Hive
  // metastore local to the cluster (created on demand, not persisted).
  // NOTE(review): connection password is hardcoded here — consider moving it
  // to a variable or secret scope rather than committing it in plain text.
  spark_conf = {
    "spark.hadoop.datanucleus.autoCreateTables" : "true",
    "spark.hadoop.datanucleus.autoCreateSchema" : "true",
    "spark.hadoop.javax.jdo.option.ConnectionDriverName" : "org.apache.derby.jdbc.EmbeddedDriver",
    "spark.hadoop.javax.jdo.option.ConnectionPassword" : "hivepass",
    "spark.hadoop.javax.jdo.option.ConnectionURL" : "jdbc:derby:memory:myInMemDB;create=true",
    "spark.sql.catalogImplementation" : "hive",
    "spark.hadoop.javax.jdo.option.ConnectionUserName" : "hiveuser",
    "spark.hadoop.datanucleus.fixedDatastore" : "false"
  }

  // Custom Tags: tag the cluster with the project prefix for cost attribution.
  custom_tags = {
    "Project" = var.resource_prefix
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ resource "time_sleep" "wait_60_seconds" {
}

locals {
uc_iam_role = "${var.resource_prefix}-catalog-${var.workspace_id}"
uc_iam_role = "${var.resource_prefix}-catalog-${var.workspace_id}"
uc_catalog_name_us = replace(var.uc_catalog_name, "-", "_")
}

Expand Down Expand Up @@ -173,4 +173,4 @@ resource "databricks_grant" "workspace_catalog" {

principal = var.user_workspace_catalog_admin
privileges = ["ALL_PRIVILEGES"]
}
}
12 changes: 6 additions & 6 deletions aws/tf/sra.tf
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,12 @@ module "SRA" {
sg_egress_ports = [443, 3306, 6666, 8443, 8444, 8445, 8446, 8447, 8448, 8449, 8450, 8451]

// REQUIRED IF USING NON-ROOT ACCOUNT CMK ADMIN:
# cmk_admin_arn = null // CMK admin ARN, defaults to the AWS account root.
# cmk_admin_arn = "arn:aws:iam::123456789012:user/CMKAdmin" // Example CMK ARN

// REQUIRED IF USING CUSTOM NETWORK:
# custom_vpc_id = null
# custom_private_subnet_ids = null // List of custom private subnet IDs required.
# custom_sg_id = null
# custom_relay_vpce_id = null
# custom_workspace_vpce_id = null
# custom_vpc_id = "vpc-0abc123456def7890" // Example VPC ID
# custom_private_subnet_ids = ["subnet-0123456789abcdef0", "subnet-0abcdef1234567890"] // Example private subnet IDs
# custom_sg_id = "sg-0123456789abcdef0" // Example security group ID
# custom_relay_vpce_id = "vpce-0abc123456def7890" // Example PrivateLink endpoint ID for Databricks relay
# custom_workspace_vpce_id = "vpce-0abcdef1234567890" // Example PrivateLink endpoint ID for Databricks workspace
}

0 comments on commit a636761

Please sign in to comment.