diff --git a/aws/tf/modules/sra/databricks_workspace/classic_cluster/cluster_configuration.tf b/aws/tf/modules/sra/databricks_workspace/classic_cluster/cluster_configuration.tf
index a697eb5..fcadc56 100644
--- a/aws/tf/modules/sra/databricks_workspace/classic_cluster/cluster_configuration.tf
+++ b/aws/tf/modules/sra/databricks_workspace/classic_cluster/cluster_configuration.tf
@@ -1,37 +1,37 @@
 // Terraform Documentation: https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster
 
 // Cluster Version
-# data "databricks_spark_version" "latest_lts" {
-#   long_term_support = true
-# }
+data "databricks_spark_version" "latest_lts" {
+  long_term_support = true
+}
 
-# // Cluster Creation
-# resource "databricks_cluster" "example" {
-#   cluster_name            = "Shared Classic Compute Plane Cluster"
-#   data_security_mode      = "USER_ISOLATION"
-#   spark_version           = data.databricks_spark_version.latest_lts.id
-#   node_type_id            = "i3.xlarge"
-#   autotermination_minutes = 10
+// Cluster Creation
+resource "databricks_cluster" "example" {
+  cluster_name            = "Shared Classic Compute Plane Cluster"
+  data_security_mode      = "USER_ISOLATION"
+  spark_version           = data.databricks_spark_version.latest_lts.id
+  node_type_id            = "i3.xlarge"
+  autotermination_minutes = 10
 
-#   autoscale {
-#     min_workers = 1
-#     max_workers = 2
-#   }
+  autoscale {
+    min_workers = 1
+    max_workers = 2
+  }
 
-#   // Derby Metastore configs
-#   spark_conf = {
-#     "spark.hadoop.datanucleus.autoCreateTables" : "true",
-#     "spark.hadoop.datanucleus.autoCreateSchema" : "true",
-#     "spark.hadoop.javax.jdo.option.ConnectionDriverName" : "org.apache.derby.jdbc.EmbeddedDriver",
-#     "spark.hadoop.javax.jdo.option.ConnectionPassword" : "hivepass",
-#     "spark.hadoop.javax.jdo.option.ConnectionURL" : "jdbc:derby:memory:myInMemDB;create=true",
-#     "spark.sql.catalogImplementation" : "hive",
-#     "spark.hadoop.javax.jdo.option.ConnectionUserName" : "hiveuser",
-#     "spark.hadoop.datanucleus.fixedDatastore" : "false"
-#   }
+  // Derby Metastore configs
+  spark_conf = {
+    "spark.hadoop.datanucleus.autoCreateTables" : "true",
+    "spark.hadoop.datanucleus.autoCreateSchema" : "true",
+    "spark.hadoop.javax.jdo.option.ConnectionDriverName" : "org.apache.derby.jdbc.EmbeddedDriver",
+    "spark.hadoop.javax.jdo.option.ConnectionPassword" : "hivepass",
+    "spark.hadoop.javax.jdo.option.ConnectionURL" : "jdbc:derby:memory:myInMemDB;create=true",
+    "spark.sql.catalogImplementation" : "hive",
+    "spark.hadoop.javax.jdo.option.ConnectionUserName" : "hiveuser",
+    "spark.hadoop.datanucleus.fixedDatastore" : "false"
+  }
 
-#   // Custom Tags
-#   custom_tags = {
-#     "Project" = var.resource_prefix
-#   }
-# }
\ No newline at end of file
+  // Custom Tags
+  custom_tags = {
+    "Project" = var.resource_prefix
+  }
+}
\ No newline at end of file
diff --git a/aws/tf/modules/sra/databricks_workspace/uc_catalog/uc_catalog.tf b/aws/tf/modules/sra/databricks_workspace/uc_catalog/uc_catalog.tf
index 272130c..cf4b478 100644
--- a/aws/tf/modules/sra/databricks_workspace/uc_catalog/uc_catalog.tf
+++ b/aws/tf/modules/sra/databricks_workspace/uc_catalog/uc_catalog.tf
@@ -7,7 +7,7 @@ resource "time_sleep" "wait_60_seconds" {
 }
 
 locals {
-  uc_iam_role = "${var.resource_prefix}-catalog-${var.workspace_id}"
+  uc_iam_role        = "${var.resource_prefix}-catalog-${var.workspace_id}"
   uc_catalog_name_us = replace(var.uc_catalog_name, "-", "_")
 }
 
@@ -173,4 +173,4 @@ resource "databricks_grant" "workspace_catalog" {
   principal  = var.user_workspace_catalog_admin
   privileges = ["ALL_PRIVILEGES"]
 
-}
+}
\ No newline at end of file
diff --git a/aws/tf/sra.tf b/aws/tf/sra.tf
index 53f338a..8d32185 100644
--- a/aws/tf/sra.tf
+++ b/aws/tf/sra.tf
@@ -27,12 +27,12 @@ module "SRA" {
   sg_egress_ports = [443, 3306, 6666, 8443, 8444, 8445, 8446, 8447, 8448, 8449, 8450, 8451]
 
   // REQUIRED IF USING NON-ROOT ACCOUNT CMK ADMIN:
-  # cmk_admin_arn = null // CMK admin ARN, defaults to the AWS account root.
+  # cmk_admin_arn = "arn:aws:iam::123456789012:user/CMKAdmin" // Example CMK ARN
 
   // REQUIRED IF USING CUSTOM NETWORK:
-  # custom_vpc_id             = null
-  # custom_private_subnet_ids = null // List of custom private subnet IDs required.
-  # custom_sg_id              = null
-  # custom_relay_vpce_id      = null
-  # custom_workspace_vpce_id  = null
+  # custom_vpc_id             = "vpc-0abc123456def7890" // Example VPC ID
+  # custom_private_subnet_ids = ["subnet-0123456789abcdef0", "subnet-0abcdef1234567890"] // Example private subnet IDs
+  # custom_sg_id              = "sg-0123456789abcdef0" // Example security group ID
+  # custom_relay_vpce_id      = "vpce-0abc123456def7890" // Example PrivateLink endpoint ID for Databricks relay
+  # custom_workspace_vpce_id  = "vpce-0abcdef1234567890" // Example PrivateLink endpoint ID for Databricks workspace
 }
\ No newline at end of file