Skip to content

Commit ae289e6

Browse files
author
dmytro_velychko3
committed
feat terraform fmt
1 parent 49a0afa commit ae289e6

File tree

4 files changed

+50
-14
lines changed

4 files changed

+50
-14
lines changed

README.md

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,12 @@ No modules.
3636
| [databricks_entitlements.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/entitlements) | resource |
3737
| [databricks_permissions.default_cluster](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/permissions) | resource |
3838
| [databricks_permissions.cluster_policy](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/permissions) | resource |
39+
| [databricks_permissions.unity_cluster](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/permissions) | resource |
3940
| [databricks_permissions.sql_endpoint](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/permissions) | resource |
4041
| [databricks_secret_acl.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/secret_acl) | resource |
4142
| [databricks_workspace_conf.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/workspace_conf) | resource |
4243
| [databricks_ip_access_list.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/ip_access_list) | resource |
43-
| [databricks_sql_global_config.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_global_config) | resource |
44+
| [databricks_sql_global_config.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_global_config) | resource |
4445
| [databricks_sql_endpoint.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_endpoint) | resource |
4546
| [azurerm_storage_data_lake_gen2_filesystem.this](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_data_lake_gen2_filesystem) | resource |
4647
| [databricks_metastore.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/metastore) | resource |
@@ -55,6 +56,7 @@ No modules.
5556

5657

5758

59+
5860
## Inputs
5961

6062
| Name | Description | Type | Default | Required |
@@ -80,8 +82,9 @@ No modules.
8082
| <a name="input_external_metastore_id"></a> [external\_metastore\_id](#input\_external\_metastore\_id) | Unity Catalog Metastore Id that is located in separate environment. Provide this value to associate Databricks Workspace with target Metastore | `string` | " " | no |
8183
| <a name="input_metastore_grants"></a> [metastore\_grants](#input\_metastore\_grants) | Permissions to give on metastore to group | `map(list(string))` | {} | no |
8284
| <a name="input_secret_scope_object"></a> [secret\_scope\_object](#input\_secret\_scope\_object) | List of objects, where 'scope_name' param is a Secret scope name and 'acl' are list of objects with 'principals' and one of allowed 'permission' ('READ', 'WRITE' or 'MANAGE') | <pre>list(object({<br> scope_name = string<br> acl = list(object({<br> principal = string<br> permission = string<br> }))<br>}))</pre> | <pre>[{<br> scope_name = null<br> acl = null<br>}]</pre> | no |
83-
| <a name="input_unity_cluster_enabled"></a> [unity\_cluster\_enabled](#input\_unity\_cluster\_enabled) | Boolean flag for creating databricks claster | `bool` | false | no |
84-
| <a name="input_unity_cluster_config"></a> [unity\_unity\_cluster\_config](#input\_unity\_cluster\_config) | Specifies the databricks unity cluster configuration | <pre> type = object({ <br> cluster_name = optional(string)<br> spark_version = optional(string)<br> spark_conf = optional(map(any))<br> spark_env_vars = optional(map(any))<br> data_security_mode = optional(string)<br> node_type_id = optional(string)<br> autotermination_minutes = optional(number)<br> min_workers = optional(number)<br> max_workers = optional(number)<br> availability = optional(string)<br> first_on_demand = optional(number)<br> spot_bid_max_price = optional(number)<br>})<br></pre> | <pre> type = object({ <br> cluster_name = optional("Unity Catalog")<br> spark_version = optional("11.3.x-scala2.12")<br> spark_conf = optional({})<br> spark_env_vars = optional({})<br> data_security_mode = optional("USER_ISOLATION")<br> node_type_id = optional("Standard_D3_v2")<br> autotermination_minutes = optional(30)<br> min_workers = optional(1)<br> max_workers = optional(2)<br> availability = optional("ON_DEMAND_AZURE")<br> first_on_demand = optional(0)<br> spot_bid_max_price = optional(1)<br>})<br></pre> | no |
85+
| <a name="input_unity_cluster_enabled"></a> [unity\_cluster\_enabled](#input\_unity\_cluster\_enabled) | Boolean flag for creating databricks cluster | `bool` | false | no |
86+
| <a name="input_unity_cluster_config"></a> [unity\_cluster\_config](#input\_unity\_cluster\_config) | Specifies the databricks unity cluster configuration | <pre>object({ <br> cluster_name = optional(string)<br> spark_version = optional(string)<br> spark_conf = optional(map(any))<br> spark_env_vars = optional(map(any))<br> data_security_mode = optional(string)<br> node_type_id = optional(string)<br> autotermination_minutes = optional(number)<br> min_workers = optional(number)<br> max_workers = optional(number)<br> availability = optional(string)<br> first_on_demand = optional(number)<br> spot_bid_max_price = optional(number)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })))<br>})<br></pre> | <pre>object({ <br> cluster_name = optional("Unity Catalog")<br> spark_version = optional("11.3.x-scala2.12")<br> spark_conf = optional({})<br> spark_env_vars = optional({})<br> data_security_mode = optional("USER_ISOLATION")<br> node_type_id = optional("Standard_D3_v2")<br> autotermination_minutes = optional(30)<br> min_workers = optional(1)<br> max_workers = optional(2)<br> availability = optional("ON_DEMAND_AZURE")<br> first_on_demand = optional(0)<br> spot_bid_max_price = optional(1)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })),null)<br>})<br></pre> | no |
87+
| <a name="input_cluster_log_conf_destination"></a> [cluster\_log\_conf\_destination](#input\_cluster\_log\_conf\_destination) | Provide a dbfs location, example 'dbfs:/cluster-logs', to push all cluster logs to a certain location | `string` | " " | no |
8588

8689

8790

permissions.tf

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,20 @@ resource "databricks_permissions" "cluster_policy" {
3737
}
3838
}
3939

40+
resource "databricks_permissions" "unity_cluster" {
41+
count = length(var.unity_cluster_config.permissions) != 0 && var.unity_cluster_enabled == true ? 1 : 0
42+
43+
cluster_id = databricks_cluster.this[0].id
44+
45+
dynamic "access_control" {
46+
for_each = var.unity_cluster_config.permissions
47+
content {
48+
group_name = access_control.value.group_name
49+
permission_level = access_control.value.permission_level
50+
}
51+
}
52+
}
53+
4054
resource "databricks_permissions" "sql_endpoint" {
4155
for_each = {
4256
for endpoint in var.sql_endpoint : (endpoint.name) => endpoint

unity.tf

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -156,12 +156,18 @@ resource "databricks_cluster" "this" {
156156
spot_bid_max_price = var.unity_cluster_config.spot_bid_max_price
157157
}
158158

159-
#lifecycle {
160-
#ignore_changes = [
161-
# state
162-
#]
163-
#precondition {
164-
# condition = var.data_security_mode == "USER_ISOLATION" ? contains(["11.3.x-scala2.12", "12.0.x-scala2.12"], var.spark_version) : true
165-
# error_message = "When USER_ISOLATION is selected, please set spark version to be either one of these values: '11.3.x-scala2.12', '12.0.x-scala2.12'"
166-
#}
159+
dynamic "cluster_log_conf" {
160+
for_each = length(var.cluster_log_conf_destination) == 0 ? [] : [var.cluster_log_conf_destination]
161+
content {
162+
dbfs {
163+
destination = cluster_log_conf.value
164+
}
165+
}
166+
}
167+
168+
lifecycle {
169+
ignore_changes = [
170+
state
171+
]
172+
}
167173
}

variables.tf

Lines changed: 16 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -197,8 +197,8 @@ variable "unity_cluster_config" {
197197
type = object({
198198
cluster_name = optional(string, "Unity Catalog")
199199
spark_version = optional(string, "11.3.x-scala2.12")
200-
spark_conf = optional(map(any), {})
201-
spark_env_vars = optional(map(any), {})
200+
spark_conf = optional(map(any), null)
201+
spark_env_vars = optional(map(any), null)
202202
data_security_mode = optional(string, "USER_ISOLATION")
203203
node_type_id = optional(string, "Standard_D3_v2")
204204
autotermination_minutes = optional(number, 30)
@@ -207,8 +207,21 @@ variable "unity_cluster_config" {
207207
availability = optional(string, "ON_DEMAND_AZURE")
208208
first_on_demand = optional(number, 0)
209209
spot_bid_max_price = optional(number, 1)
210-
210+
permissions = optional(set(object({
211+
group_name = string
212+
permission_level = string
213+
})), null)
211214
})
212215
description = "Specifies the databricks unity cluster configuration"
213216
default = {}
214217
}
218+
219+
variable "cluster_log_conf_destination" {
220+
type = string
221+
description = "Provide a dbfs location to push all cluster logs to certain location"
222+
default = ""
223+
validation {
224+
condition = length(var.cluster_log_conf_destination) == 0 ? true : startswith(var.cluster_log_conf_destination, "dbfs:/")
225+
error_message = "Provide valid path to dbfs logs folder, example: 'dbfs:/logs'"
226+
}
227+
}

0 commit comments

Comments
 (0)