-
Notifications
You must be signed in to change notification settings - Fork 7
/
cluster.tf
80 lines (67 loc) · 3.6 KB
/
cluster.tf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
# Derived values shared by the resources in this file.
locals {
  # Prefer an explicitly supplied policy id; otherwise fall back to the
  # policy this module may have created (splat join collapses to "" when
  # no policy resource exists).
  cluster_policy_id = var.cluster_policy_id == null ? join("", databricks_cluster_policy.this[*].id) : var.cluster_policy_id

  # Default cluster name is derived from the team/project identifiers.
  cluster_name = var.cluster_name == null ? "${var.teamid}-${var.prjid} (Terraform managed)" : var.cluster_name
}
# Installs one Maven library per map entry onto the managed cluster.
resource "databricks_library" "maven" {
  # Guard against var.libraries missing the "maven" attribute or it being null.
  for_each = try(var.libraries.maven, null) == null ? {} : var.libraries.maven

  cluster_id = databricks_cluster.cluster[local.cluster_name].id

  maven {
    # Map key is the Maven coordinate (group:artifact:version).
    coordinates = each.key
    # Optional list of transitive dependencies to exclude.
    exclusions = try(each.value.exclusion, [])
  }
}
# Installs one Python wheel per map entry onto the managed cluster.
resource "databricks_library" "python_wheel" {
  # Guard against var.libraries missing the "python_wheel" attribute or it being null.
  for_each = try(var.libraries.python_wheel, null) == null ? {} : var.libraries.python_wheel

  cluster_id = databricks_cluster.cluster[local.cluster_name].id

  # Map key is the wheel's path/URI.
  whl = each.key
}
# Databricks cluster, created only when var.deploy_cluster is true.
# for_each iterates over a single-element set keyed by the cluster name so the
# library resources above can address it as cluster[local.cluster_name].
resource "databricks_cluster" "cluster" {
  for_each = var.deploy_cluster == true ? toset([local.cluster_name]) : toset([])

  cluster_name = local.cluster_name
  # Attach a policy only when one was supplied or this module deployed one.
  policy_id     = var.cluster_policy_id == null && var.deploy_cluster_policy == false ? null : local.cluster_policy_id
  spark_version = var.spark_version != null ? var.spark_version : data.databricks_spark_version.latest.id

  # Node types and instance pools are mutually exclusive: when a worker
  # instance pool is deployed, the cluster draws its nodes from the pool.
  node_type_id        = var.deploy_worker_instance_pool != true ? local.worker_node_type : null
  instance_pool_id    = var.deploy_worker_instance_pool == true ? join("", databricks_instance_pool.worker_instance_nodes[*].id) : null
  driver_node_type_id = var.deploy_worker_instance_pool != true ? local.driver_node_type : null

  # Fixed-size cluster when var.fixed_value is set (0 selects single-node
  # mode, see custom_tags/spark_conf below); null defers to autoscaling.
  # (Was `var.fixed_value != null ? var.fixed_value : null`, a redundant ternary.)
  num_workers        = var.fixed_value
  data_security_mode = var.data_security_mode

  dynamic "autoscale" {
    # Autoscale only when no fixed size was requested and a [min, max]
    # pair was provided.
    for_each = var.fixed_value == null && var.auto_scaling != null ? [var.auto_scaling] : []
    content {
      min_workers = autoscale.value[0]
      max_workers = autoscale.value[1]
    }
  }

  dynamic "aws_attributes" {
    for_each = var.aws_attributes == null ? [] : [var.aws_attributes]
    content {
      # When the module registered an instance profile in the workspace,
      # prefer it over any profile passed in via var.aws_attributes.
      instance_profile_arn   = var.add_instance_profile_to_workspace == true ? join("", databricks_instance_profile.shared[*].id) : lookup(aws_attributes.value, "instance_profile_arn", null)
      zone_id                = lookup(aws_attributes.value, "zone_id", null)
      first_on_demand        = lookup(aws_attributes.value, "first_on_demand", null)
      availability           = lookup(aws_attributes.value, "availability", null)
      spot_bid_price_percent = lookup(aws_attributes.value, "spot_bid_price_percent", null)
      ebs_volume_count       = lookup(aws_attributes.value, "ebs_volume_count", null)
      ebs_volume_size        = lookup(aws_attributes.value, "ebs_volume_size", null)
      ebs_volume_type        = lookup(aws_attributes.value, "ebs_volume_type", null)
    }
  }

  dynamic "azure_attributes" {
    for_each = var.azure_attributes == null ? [] : [var.azure_attributes]
    content {
      first_on_demand    = lookup(azure_attributes.value, "first_on_demand", null)
      availability       = lookup(azure_attributes.value, "availability", null)
      spot_bid_max_price = lookup(azure_attributes.value, "spot_bid_max_price", null)
    }
  }

  dynamic "gcp_attributes" {
    for_each = var.gcp_attributes == null ? [] : [var.gcp_attributes]
    content {
      availability = lookup(gcp_attributes.value, "availability", null)
    }
  }

  autotermination_minutes = var.cluster_autotermination_minutes

  # fixed_value == 0 denotes a single-node cluster, which requires the
  # "SingleNode" resource-class tag plus the singleNode profile and a
  # local[*] Spark master.
  custom_tags = var.fixed_value == 0 ? merge({ "ResourceClass" = "SingleNode" }, var.custom_tags) : var.custom_tags
  spark_conf = var.fixed_value == 0 ? {
    "spark.databricks.cluster.profile" : "singleNode"
    "spark.master" : "local[*]"
  } : var.spark_conf
  spark_env_vars = var.spark_env_vars
}