Commit 90194ac

Author: dmytro_velychko3 committed
fix: move cluster policy to cluster tf
1 parent cb81e91 commit 90194ac

6 files changed (+59 -52 lines)


README.md

Lines changed: 31 additions & 17 deletions
@@ -10,11 +10,14 @@ Here we provide some examples of how to provision it with a different options.

### In example below, these features of given module would be covered:
1. Workspace admins assignment, custom Workspace group creation, group assignments, group entitlements
-2. Workspace IP Access list creation
-3. SQL Endpoint creation and configuration
-4. Create Cluster policy and assign permissions to custom groups
-5. Create Secret Scope and assign permissions to custom groups
-6. Connect to already existing Unity Catalog Metastore
+2. Default Shared Autoscaling cluster
+3. Workspace IP Access list creation
+4. ADLS Gen2 Mount
+5. Secret scope and its secrets
+6. SQL Endpoint creation and configuration
+7. Create Cluster policy and assign permissions to custom groups
+8. Create Secret Scope and assign permissions to custom groups
+9. Connect to already existing Unity Catalog Metastore

```hcl
# Prerequisite resources
@@ -39,8 +42,8 @@ data "azurerm_key_vault" "example" {
}

# Given module is tightly coupled with this "Runtime Premium" module, it's usage is prerequisite.
-module "databricks_runtime_core" {
-  source = "data-platform-hq/databricks-runtime/databricks"
+module "databricks_runtime_premium" {
+  source = "data-platform-hq/databricks-runtime-premium/databricks"

  sku = data.databricks_workspace.example.sku
  workspace_id = data.databricks_workspace.example.workspace_id
@@ -52,10 +55,18 @@ module "databricks_runtime_core" {
  sp_key_secret_name = "sp-key" # secret's name that stores Service Principal Secret Key
  tenant_id_secret_name = "infra-arm-tenant-id" # secret's name that stores tenant id value

-  # Default cluster parameters
+  # Databricks clusters configuration
+  databricks_cluster_configs = [{
+    cluster_name = "shared autoscaling"
+    data_security_mode = "NONE"
+    availability = "SPOT_AZURE"
+    spot_bid_max_price = -1
+    permissions = [{group_name = "dev", permission_level = "CAN_MANAGE"}]
+  }]
+
+  # Databricks cluster policies
  custom_cluster_policies = [{
    name = "custom_policy_1",
-    assigned = true, # automatically assigns this policy to default shared cluster if set 'true'
    can_use = "DEVELOPERS", # custom workspace group name, that is allowed to use this policy
    definition = {
      "autoscale.max_workers": {
@@ -97,6 +108,12 @@ module "databricks_runtime_premium" {
    "ip_range_2" = "10.33.0.0/16",
  }

+  # ADLS Gen2 Mount
+  mountpoints = {
+    storage_account_name = data.azurerm_storage_account.example.name
+    container_name = "example_container"
+  }
+
  # Here is the map of users and theirs object ids.
  # This step is optional, in case of Service Principal assignment to workspace,
  # please only required to provide APP ID as it's value
@@ -130,13 +147,9 @@ module "databricks_runtime_premium" {
      ]
      "service_principal" = []
      entitlements = ["allow_instance_pool_create","allow_cluster_create","databricks_sql_access"]
-      default_cluster_permission = "CAN_RESTART" # assigns certain permission on default cluster to created group
    }
  }
-
-  # Assigns acls on secret scope to a custom group ("DEVELOPERS" in this example)
-  secret_scope_object = module.databricks_runtime_core.secret_scope_object
-
+
  providers = {
    databricks = databricks.main
  }
@@ -290,7 +303,8 @@ No modules.
| [azurerm_key_vault_secret.sp_client_id](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
| [azurerm_key_vault_secret.sp_key](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
| [azurerm_key_vault_secret.tenant_id](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
-| [databricks_workspace_conf.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/workspace_conf) | resource |
+| [databricks_workspace_conf.pat](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/workspace_conf) | resource |
+| [databricks_token.pat](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/token) | resource |
| [databricks_ip_access_list.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/ip_access_list) | resource |
| [databricks_sql_global_config.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_global_config) | resource |
| [databricks_sql_endpoint.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_endpoint) | resource |
@@ -332,14 +346,13 @@ No modules.
| <a name="input_suffix"></a> [suffix](#input\_suffix) | Optional suffix that would be added to the end of resources names. | `string` | " " | no |
| <a name="input_external_metastore_id"></a> [external\_metastore\_id](#input\_external\_metastore\_id) | Unity Catalog Metastore Id that is located in separate environment. Provide this value to associate Databricks Workspace with target Metastore | `string` | " " | no |
| <a name="input_metastore_grants"></a> [metastore\_grants](#input\_metastore\_grants) | Permissions to give on metastore to group | `map(list(string))` | {} | no |
-| <a name="input_secret_scope_object"></a> [secret\_scope\_object](#input\_secret\_scope\_object) | List of objects, where 'scope_name' param is a Secret scope name and 'acl' are list of objects with 'principals' and one of allowed 'permission' ('READ', 'WRITE' or 'MANAGE') | <pre>list(object({<br> scope_name = string<br> acl = list(object({<br> principal = string<br> permission = string<br> }))<br>}))</pre> | <pre>[{<br> scope_name = null<br> acl = null<br>}]</pre> | no |
| <a name="input_sp_client_id_secret_name"></a> [sp\_client\_id\_secret\_name](#input\_sp\_client\_id\_secret\_name) | The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
| <a name="input_sp_key_secret_name"></a> [sp\_key\_secret\_name](#input\_sp\_key\_secret\_name) | The name of Azure Key Vault secret that contains client secret of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
| <a name="input_secret_scope"></a> [secret\_scope](#input\_secret\_scope) | Provides an ability to create custom Secret Scope, store secrets in it and assigning ACL for access management | <pre>list(object({<br> scope_name = string<br> acl = optional(list(object({<br> principal = string<br> permission = string<br> })))<br> secrets = optional(list(object({<br> key = string<br> string_value = string<br> })))<br>}))<br></pre> | <pre>default = [{<br> scope_name = null<br> acl = null<br> secrets = null<br>}]<br></pre> | yes |
| <a name="input_key_vault_id"></a> [key\_vault\_id](#input\_key\_vault\_id) | ID of the Key Vault instance where the Secret resides | `string` | n/a | yes |
| <a name="input_tenant_id_secret_name"></a> [tenant\_id\_secret\_name](#input\_tenant\_id\_secret\_name) | The name of Azure Key Vault secret that contains tenant ID secret of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
| <a name="input_mountpoints"></a> [mountpoints](#input\_mountpoints) | Mountpoints for databricks | <pre>map(object({<br> storage_account_name = string<br> container_name = string<br>}))<br></pre> |{}| no |
-| <a name="input_custom_cluster_policies"></a> [custom\_cluster\_policies](#input\_custom\_cluster\_policies) | Provides an ability to create custom cluster policy, assign it to cluster and grant CAN_USE permissions on it to certain custom groups | <pre>list(object({<br> name = string<br> can_use = list(string)<br> definition = any<br> assigned = bool<br>}))<br></pre> |<pre>[{<br> name = null<br> can_use = null<br> definition = null<br> assigned = false<br>}]<br></pre>| no |
+| <a name="input_custom_cluster_policies"></a> [custom\_cluster\_policies](#input\_custom\_cluster\_policies) | Provides an ability to create custom cluster policy, assign it to cluster and grant CAN_USE permissions on it to certain custom groups | <pre>list(object({<br> name = string<br> can_use = list(string)<br> definition = any<br>}))<br></pre> |<pre>[{<br> name = null<br> can_use = null<br> definition = null<br>}]<br></pre>| no |
| <a name="input_clusters"></a> [clusters](#input\_clusters) | Set of objects with parameters to configure Databricks clusters and assign permissions to it for certain custom groups | <pre>set(object({<br> cluster_name = string<br> spark_version = optional(string)<br> spark_conf = optional(map(any))<br> spark_env_vars = optional(map(any))<br> data_security_mode = optional(string)<br> node_type_id = optional(string)<br> autotermination_minutes = optional(number)<br> min_workers = optional(number)<br> max_workers = optional(number)<br> max_workers = optional(number)<br> availability = optional(string)<br> first_on_demand = optional(number)<br> spot_bid_max_price = optional(number)<br> cluster_log_conf_destination = optional(string)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })), [])<br>}))<br></pre> | <pre>set(object({<br> cluster_name = string<br> spark_version = optional(string, "11.3.x-scala2.12")<br> spark_conf = optional(map(any), {})<br> spark_env_vars = optional(map(any), {})<br> data_security_mode = optional(string, "USER_ISOLATION")<br> node_type_id = optional(string, "Standard_D3_v2")<br> autotermination_minutes = optional(number, 30)<br> min_workers = optional(number, 1)<br> max_workers = optional(number, 2)<br> max_workers = optional(number, 2)<br> availability = optional(string, "ON_DEMAND_AZURE")<br> first_on_demand = optional(number, 0)<br> spot_bid_max_price = optional(number, 1)<br> cluster_log_conf_destination = optional(string, null)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })), [])<br>}))<br></pre> | no |
@@ -351,6 +364,7 @@ No modules.
| <a name="output_sql_endpoint_jdbc_url"></a> [sql\_endpoint\_jdbc\_url](#output\_sql\_endpoint\_jdbc\_url) | JDBC connection string of SQL Endpoint |
| <a name="output_sql_endpoint_data_source_id"></a> [sql\_endpoint\_data\_source\_id](#output\_sql\_endpoint\_data\_source\_id) | ID of the data source for this endpoint |
| <a name="output_metastore_id"></a> [metastore\_id](#output\_metastore\_id) | Unity Catalog Metastore Id |
+| <a name="output_token"></a> [token](#output\_token) | Databricks Personal Authorization Token |
<!-- END_TF_DOCS -->

## License

cluster.tf

Lines changed: 10 additions & 0 deletions
@@ -35,3 +35,13 @@ resource "databricks_cluster" "cluster" {
    ]
  }
}
+
+resource "databricks_cluster_policy" "this" {
+  for_each = {
+    for param in var.custom_cluster_policies : (param.name) => param.definition
+    if param.definition != null
+  }
+
+  name = each.key
+  definition = jsonencode(each.value)
+}
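
For context, a minimal sketch of an input that would drive the relocated resource (the policy values and the "DEVELOPERS" group below are illustrative; only the variable shape comes from the module): each entry is keyed by its `name` in the `for_each`, its map-typed `definition` is serialized with `jsonencode()`, and entries whose `definition` is `null` are skipped by the `if` filter.

```hcl
# Illustrative value for var.custom_cluster_policies (not part of this commit).
custom_cluster_policies = [{
  name    = "custom_policy_1"
  can_use = ["DEVELOPERS"] # example custom workspace group
  definition = {
    "autoscale.max_workers" : { "type" : "range", "maxValue" : 3, "defaultValue" : 2 }
  }
}]
```

With this value the module would create `databricks_cluster_policy.this["custom_policy_1"]`; granting CAN_USE on it to the `can_use` groups is handled elsewhere in the module and is not shown in this diff.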

main.tf

Lines changed: 5 additions & 0 deletions
@@ -26,6 +26,11 @@ resource "databricks_workspace_conf" "this" {
  }
}

+resource "databricks_token" "pat" {
+  comment = "Terraform Provisioning"
+  lifetime_seconds = var.pat_token_lifetime_seconds
+}
+
resource "databricks_ip_access_list" "this" {
  count = local.ip_rules == null ? 0 : 1

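The new resource references `var.pat_token_lifetime_seconds`, whose declaration is not part of this diff; a plausible shape, with a purely illustrative default, is sketched below.

```hcl
# Assumed declaration only: the real type, description and default live in
# variables.tf and are not shown in this commit.
variable "pat_token_lifetime_seconds" {
  type        = number
  description = "Lifetime, in seconds, of the PAT created by databricks_token.pat"
  default     = 36000 # illustrative value (10 hours)
}
```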
outputs.tf

Lines changed: 5 additions & 0 deletions
@@ -12,3 +12,8 @@ output "metastore_id" {
  value = var.create_metastore ? databricks_metastore.this[0].id : ""
  description = "Unity Catalog Metastore Id"
}
+
+output "token" {
+  value = databricks_token.pat.token_value
+  description = "Databricks Personal Authorization Token"
+}
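
A minimal sketch of how the new `token` output might be consumed, assuming the module is instantiated as `module.databricks_runtime_premium` as in the README example; the provider alias and the `azurerm_databricks_workspace` data source below are illustrative, not part of this commit.

```hcl
# Illustrative only: authenticate an additional Databricks provider with the generated PAT.
provider "databricks" {
  alias = "pat" # hypothetical alias

  host  = data.azurerm_databricks_workspace.example.workspace_url # assumed data source
  token = module.databricks_runtime_premium.token
}
```

Since the output is not marked `sensitive`, consumers may want to handle the exported token value carefully (for example, avoid echoing it in CI logs).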

permissions.tf

Lines changed: 8 additions & 11 deletions
@@ -1,21 +1,18 @@
locals {
-  secrets_acl_objects_list = flatten([for param in var.secret_scope_object : [
+  secret_scope_object = {
+    value = [for param in var.secret_scope : {
+      scope_name = databricks_secret_scope.this[param.scope_name].name
+      acl = param.acl
+    } if param.acl != null]
+  }
+
+  secrets_acl_objects_list = flatten([for param in local.secret_scope_object : [
    for permission in param.acl : {
      scope = param.scope_name, principal = permission.principal, permission = permission.permission
    }] if param.acl != null
  ])
}

-resource "databricks_cluster_policy" "this" {
-  for_each = {
-    for param in var.custom_cluster_policies : (param.name) => param.definition
-    if param.definition != null
-  }
-
-  name = each.key
-  definition = jsonencode(each.value)
-}
-
resource "databricks_permissions" "clusters" {
  for_each = {
    for v in var.clusters : (v.cluster_name) => v
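
For illustration, a hypothetical `secret_scope` value and the ACL objects the refactored locals are intended to derive from it (all names below are examples, not defined by this commit):

```hcl
# Illustrative value for var.secret_scope.
secret_scope = [{
  scope_name = "demo-scope"
  acl = [
    { principal = "DEVELOPERS", permission = "READ" },
    { principal = "DEVELOPERS", permission = "WRITE" },
  ]
  secrets = [{ key = "example-key", string_value = "example-value" }]
}]

# Intended result of the locals above, roughly:
# secrets_acl_objects_list = [
#   { scope = "demo-scope", principal = "DEVELOPERS", permission = "READ" },
#   { scope = "demo-scope", principal = "DEVELOPERS", permission = "WRITE" },
# ]
```

The scope name is resolved through `databricks_secret_scope.this`, so each ACL object refers to the scope the module actually creates rather than the raw input string.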

variables.tf

Lines changed: 0 additions & 24 deletions
@@ -48,7 +48,6 @@ variable "iam" {
    user = optional(list(string))
    service_principal = optional(list(string))
    entitlements = optional(list(string))
-    default_cluster_permission = optional(string)
  }))
  description = "Used to create workspace group. Map of group name and its parameters, such as users and service principals added to the group. Also possible to configure group entitlements."
  default = {}
@@ -150,22 +149,6 @@ variable "metastore_grants" {
  }
}

-# Secret Scope ACLs variables
-variable "secret_scope_object" {
-  type = list(object({
-    scope_name = string
-    acl = list(object({
-      principal = string
-      permission = string
-    }))
-  }))
-  description = "List of objects, where 'scope_name' param is a Secret scope name and 'acl' are list of objects with 'principals' and one of allowed 'permission' ('READ', 'WRITE' or 'MANAGE')"
-  default = [{
-    scope_name = null
-    acl = null
-  }]
-}
-
variable "sp_client_id_secret_name" {
  type = string
  description = "The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault"
@@ -226,25 +209,18 @@ variable "custom_cluster_policies" {
    name = string
    can_use = list(string)
    definition = any
-    assigned = bool
  }))
  description = <<-EOT
  Provides an ability to create custom cluster policy, assign it to cluster and grant CAN_USE permissions on it to certain custom groups
  name - name of custom cluster policy to create
  can_use - list of string, where values are custom group names, there groups have to be created with Terraform;
  definition - JSON document expressed in Databricks Policy Definition Language. No need to call 'jsonencode()' function on it when providing a value;
-  assigned - boolean flag which assigns policy to default 'shared autoscaling' cluster, only single custom policy could be assigned;
  EOT
  default = [{
    name = null
    can_use = null
    definition = null
-    assigned = false
  }]
-  validation {
-    condition = length([for policy in var.custom_cluster_policies : policy.assigned if policy.assigned]) <= 1
-    error_message = "Only single cluster policy assignment allowed. Please set 'assigned' parameter to 'true' for exact one or none policy"
-  }
}

variable "clusters" {

0 commit comments
