Skip to content

Commit 58a10c2

Browse files
authored
Merge pull request #18 from data-platform-hq/move-resources
fix: move cluster policy to cluster tf
2 parents cb81e91 + f3e456e commit 58a10c2

File tree

6 files changed

+84
-96
lines changed

6 files changed

+84
-96
lines changed

README.md

Lines changed: 53 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,14 @@ Here we provide some examples of how to provision it with a different options.
1010

1111
### In example below, these features of given module would be covered:
1212
1. Workspace admins assignment, custom Workspace group creation, group assignments, group entitlements
13-
2. Workspace IP Access list creation
14-
3. SQL Endpoint creation and configuration
15-
4. Create Cluster policy and assign permissions to custom groups
16-
5. Create Secret Scope and assign permissions to custom groups
17-
6. Connect to already existing Unity Catalog Metastore
13+
2. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
14+
3. Workspace IP Access list creation
15+
4. ADLS Gen2 Mount
16+
5. Secret scope and its secrets
17+
6. SQL Endpoint creation and configuration
18+
7. Create Cluster policy and assign permissions to custom groups
19+
8. Create Secret Scope and assign permissions to custom groups
20+
9. Connect to already existing Unity Catalog Metastore
1821

1922
```hcl
2023
# Prerequisite resources
@@ -38,12 +41,13 @@ data "azurerm_key_vault" "example" {
3841
resource_group_name = "example-rg"
3942
}
4043
41-
# Given module is tightly coupled with this "Runtime Premium" module, it's usage is prerequisite.
42-
module "databricks_runtime_core" {
43-
source = "data-platform-hq/databricks-runtime/databricks"
44+
# Example usage of module for Runtime Premium resources.
45+
module "databricks_runtime_premium" {
46+
source = "data-platform-hq/databricks-runtime-premium/databricks"
4447
45-
sku = data.databricks_workspace.example.sku
46-
workspace_id = data.databricks_workspace.example.workspace_id
48+
project = "datahq"
49+
env = "example"
50+
location = "eastus"
4751
4852
# Parameters of Service principal used for ADLS mount
4953
# Imports App ID and Secret of Service Principal from target Key Vault
@@ -52,10 +56,18 @@ module "databricks_runtime_core" {
5256
sp_key_secret_name = "sp-key" # secret's name that stores Service Principal Secret Key
5357
tenant_id_secret_name = "infra-arm-tenant-id" # secret's name that stores tenant id value
5458
55-
# Default cluster parameters
59+
# Databricks clusters configuration
60+
databricks_cluster_configs = [{
61+
cluster_name = "shared autoscaling"
62+
data_security_mode = "NONE"
63+
availability = "SPOT_AZURE"
64+
spot_bid_max_price = -1
65+
permissions = [{group_name = "dev", permission_level = "CAN_MANAGE"}]
66+
}]
67+
68+
# Databricks cluster policies
5669
custom_cluster_policies = [{
5770
name = "custom_policy_1",
58-
assigned = true, # automatically assigns this policy to default shared cluster if set 'true'
5971
can_use = "DEVELOPERS", # custom workspace group name, that is allowed to use this policy
6072
definition = {
6173
"autoscale.max_workers": {
@@ -65,38 +77,18 @@ module "databricks_runtime_core" {
6577
},
6678
}
6779
}]
68-
69-
# Additional Secret Scope
70-
secret_scope = [{
71-
scope_name = "extra-scope"
72-
# Only custom workspace group names are allowed. If left empty then only Workspace admins could access these keys
73-
acl = [
74-
{ principal = "DEVELOPERS", permission = "READ" }
75-
]
76-
secrets = [
77-
{ key = "secret-name", string_value = "secret-value"}
78-
]
79-
}]
80-
81-
providers = {
82-
databricks = databricks.main
83-
}
84-
}
85-
86-
# Example usage of module for Runtime Premium resources.
87-
module "databricks_runtime_premium" {
88-
source = "data-platform-hq/databricks-runtime-premium/databricks"
89-
90-
project = "datahq"
91-
env = "example"
92-
location = "eastus"
93-
9480
# Workspace could be accessed only from these IP Addresses:
9581
ip_rules = {
9682
"ip_range_1" = "10.128.0.0/16",
9783
"ip_range_2" = "10.33.0.0/16",
9884
}
9985
86+
# ADLS Gen2 Mount
87+
mountpoints = {
88+
storage_account_name = data.azurerm_storage_account.example.name
89+
container_name = "example_container"
90+
}
91+
10092
# Here is the map of users and their object IDs.
10193
# This step is optional, in case of Service Principal assignment to workspace,
10294
# it is only required to provide the App ID as its value
@@ -112,12 +104,8 @@ module "databricks_runtime_premium" {
112104
113105
# Workspace admins
114106
workspace_admins = {
115-
user = [
116-
"user1@example.com"
117-
]
118-
service_principal = [
119-
"example-app-id"
120-
]
107+
user = ["user1@example.com"]
108+
service_principal = ["example-app-id"]
121109
}
122110
123111
# Custom Workspace group with assigned users/service_principals.
@@ -130,13 +118,16 @@ module "databricks_runtime_premium" {
130118
]
131119
"service_principal" = []
132120
entitlements = ["allow_instance_pool_create","allow_cluster_create","databricks_sql_access"]
133-
default_cluster_permission = "CAN_RESTART" # assigns certain permission on default cluster to created group
134121
}
135122
}
136-
137-
# Assigns acls on secret scope to a custom group ("DEVELOPERS" in this example)
138-
secret_scope_object = module.databricks_runtime_core.secret_scope_object
139123
124+
# Additional Secret Scope
125+
secret_scope = [{
126+
scope_name = "extra-scope"
127+
acl = [{ principal = "DEVELOPERS", permission = "READ" }] # Only custom workspace group names are allowed. If left empty then only Workspace admins could access these keys
128+
secrets = [{ key = "secret-name", string_value = "secret-value"}]
129+
}]
130+
140131
providers = {
141132
databricks = databricks.main
142133
}
@@ -236,13 +227,16 @@ module "databricks_runtime_premium" {
236227
237228
# Permissions
238229
workspace_admins = {
239-
user = [
240-
"user1@example.com",
241-
]
242-
service_principal = [
243-
"example-app-id"
244-
]
230+
user = ["user1@example.com"]
231+
service_principal = ["example-app-id"]
245232
}
233+
234+
# Cluster for Unity Catalog access
235+
databricks_cluster_configs = [{
236+
cluster_name = "Unity Catalog"
237+
availability = "SPOT_AZURE"
238+
spot_bid_max_price = -1
239+
}]
246240
247241
providers = {
248242
databricks = databricks.main
@@ -290,7 +284,8 @@ No modules.
290284
| [azurerm_key_vault_secret.sp_client_id](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
291285
| [azurerm_key_vault_secret.sp_key](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
292286
| [azurerm_key_vault_secret.tenant_id](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/key_vault_secret) | data |
293-
| [databricks_workspace_conf.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/workspace_conf) | resource |
287+
| [databricks_workspace_conf.pat](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/workspace_conf) | resource |
288+
| [databricks_token.pat](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/token) | resource |
294289
| [databricks_ip_access_list.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/ip_access_list) | resource |
295290
| [databricks_sql_global_config.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_global_config) | resource |
296291
| [databricks_sql_endpoint.this](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_endpoint) | resource |
@@ -332,15 +327,15 @@ No modules.
332327
| <a name="input_suffix"></a> [suffix](#input\_suffix) | Optional suffix that would be added to the end of resources names. | `string` | " " | no |
333328
| <a name="input_external_metastore_id"></a> [external\_metastore\_id](#input\_external\_metastore\_id) | Unity Catalog Metastore Id that is located in separate environment. Provide this value to associate Databricks Workspace with target Metastore | `string` | " " | no |
334329
| <a name="input_metastore_grants"></a> [metastore\_grants](#input\_metastore\_grants) | Permissions to give on metastore to group | `map(list(string))` | {} | no |
335-
| <a name="input_secret_scope_object"></a> [secret\_scope\_object](#input\_secret\_scope\_object) | List of objects, where 'scope_name' param is a Secret scope name and 'acl' are list of objects with 'principals' and one of allowed 'permission' ('READ', 'WRITE' or 'MANAGE') | <pre>list(object({<br> scope_name = string<br> acl = list(object({<br> principal = string<br> permission = string<br> }))<br>}))</pre> | <pre>[{<br> scope_name = null<br> acl = null<br>}]</pre> | no |
336330
| <a name="input_sp_client_id_secret_name"></a> [sp\_client\_id\_secret\_name](#input\_sp\_client\_id\_secret\_name) | The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
337331
| <a name="input_sp_key_secret_name"></a> [sp\_key\_secret\_name](#input\_sp\_key\_secret\_name) | The name of Azure Key Vault secret that contains client secret of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
338332
| <a name="input_secret_scope"></a> [secret\_scope](#input\_secret\_scope) | Provides an ability to create custom Secret Scope, store secrets in it and assigning ACL for access management | <pre>list(object({<br> scope_name = string<br> acl = optional(list(object({<br> principal = string<br> permission = string<br> })))<br> secrets = optional(list(object({<br> key = string<br> string_value = string<br> })))<br>}))<br></pre> | <pre>default = [{<br> scope_name = null<br> acl = null<br> secrets = null<br>}]<br></pre> | yes |
339333
| <a name="input_key_vault_id"></a> [key\_vault\_id](#input\_key\_vault\_id) | ID of the Key Vault instance where the Secret resides | `string` | n/a | yes |
340334
| <a name="input_tenant_id_secret_name"></a> [tenant\_id\_secret\_name](#input\_tenant\_id\_secret\_name) | The name of Azure Key Vault secret that contains tenant ID secret of Service Principal to access in Azure Key Vault | `string` | n/a | yes |
341335
| <a name="input_mountpoints"></a> [mountpoints](#input\_mountpoints) | Mountpoints for databricks | <pre>map(object({<br> storage_account_name = string<br> container_name = string<br>}))<br></pre> |{}| no |
342-
| <a name="input_custom_cluster_policies"></a> [custom\_cluster\_policies](#input\_custom\_cluster\_policies) | Provides an ability to create custom cluster policy, assign it to cluster and grant CAN_USE permissions on it to certain custom groups | <pre>list(object({<br> name = string<br> can_use = list(string)<br> definition = any<br> assigned = bool<br>}))<br></pre> |<pre>[{<br> name = null<br> can_use = null<br> definition = null<br> assigned = false<br>}]<br></pre>| no |
336+
| <a name="input_custom_cluster_policies"></a> [custom\_cluster\_policies](#input\_custom\_cluster\_policies) | Provides an ability to create custom cluster policy, assign it to cluster and grant CAN_USE permissions on it to certain custom groups | <pre>list(object({<br> name = string<br> can_use = list(string)<br> definition = any<br>}))<br></pre> |<pre>[{<br> name = null<br> can_use = null<br> definition = null<br>}]<br></pre>| no |
343337
| <a name="input_clusters"></a> [clusters](#input\_clusters) | Set of objects with parameters to configure Databricks clusters and assign permissions to them for certain custom groups | <pre>set(object({<br> cluster_name = string<br> spark_version = optional(string)<br> spark_conf = optional(map(any))<br> spark_env_vars = optional(map(any))<br> data_security_mode = optional(string)<br> node_type_id = optional(string)<br> autotermination_minutes = optional(number)<br> min_workers = optional(number)<br> max_workers = optional(number)<br> availability = optional(string)<br> first_on_demand = optional(number)<br> spot_bid_max_price = optional(number)<br> cluster_log_conf_destination = optional(string)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })), [])<br>}))<br></pre> | <pre>set(object({<br> cluster_name = string<br> spark_version = optional(string, "11.3.x-scala2.12")<br> spark_conf = optional(map(any), {})<br> spark_env_vars = optional(map(any), {})<br> data_security_mode = optional(string, "USER_ISOLATION")<br> node_type_id = optional(string, "Standard_D3_v2")<br> autotermination_minutes = optional(number, 30)<br> min_workers = optional(number, 1)<br> max_workers = optional(number, 2)<br> availability = optional(string, "ON_DEMAND_AZURE")<br> first_on_demand = optional(number, 0)<br> spot_bid_max_price = optional(number, 1)<br> cluster_log_conf_destination = optional(string, null)<br> permissions = optional(set(object({<br> group_name = string<br> permission_level = string<br> })), [])<br>}))<br></pre> | no |
338+
| <a name="input_pat_token_lifetime_seconds"></a> [pat\_token\_lifetime\_seconds](#input\_pat\_token\_lifetime\_seconds) | The lifetime of the token, in seconds. If no lifetime is specified, the token remains valid indefinitely | `number` | 315569520 | no |
344339

345340

346341

@@ -351,6 +346,7 @@ No modules.
351346
| <a name="output_sql_endpoint_jdbc_url"></a> [sql\_endpoint\_jdbc\_url](#output\_sql\_endpoint\_jdbc\_url) | JDBC connection string of SQL Endpoint |
352347
| <a name="output_sql_endpoint_data_source_id"></a> [sql\_endpoint\_data\_source\_id](#output\_sql\_endpoint\_data\_source\_id) | ID of the data source for this endpoint |
353348
| <a name="output_metastore_id"></a> [metastore\_id](#output\_metastore\_id) | Unity Catalog Metastore Id |
349+
| <a name="output_token"></a> [token](#output\_token) | Databricks Personal Authorization Token |
354350
<!-- END_TF_DOCS -->
355351

356352
## License

cluster.tf

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,3 +35,13 @@ resource "databricks_cluster" "cluster" {
3535
]
3636
}
3737
}
38+
39+
resource "databricks_cluster_policy" "this" {
40+
for_each = {
41+
for param in var.custom_cluster_policies : (param.name) => param.definition
42+
if param.definition != null
43+
}
44+
45+
name = each.key
46+
definition = jsonencode(each.value)
47+
}

main.tf

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,11 @@ resource "databricks_workspace_conf" "this" {
2626
}
2727
}
2828

29+
resource "databricks_token" "pat" {
30+
comment = "Terraform Provisioning"
31+
lifetime_seconds = var.pat_token_lifetime_seconds
32+
}
33+
2934
resource "databricks_ip_access_list" "this" {
3035
count = local.ip_rules == null ? 0 : 1
3136

outputs.tf

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,8 @@ output "metastore_id" {
1212
value = var.create_metastore ? databricks_metastore.this[0].id : ""
1313
description = "Unity Catalog Metastore Id"
1414
}
15+
16+
output "token" {
17+
value = databricks_token.pat.token_value
18+
description = "Databricks Personal Authorization Token"
19+
}

permissions.tf

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,11 @@
11
locals {
2-
secrets_acl_objects_list = flatten([for param in var.secret_scope_object : [
2+
secrets_acl_objects_list = flatten([for param in var.secret_scope : [
33
for permission in param.acl : {
44
scope = param.scope_name, principal = permission.principal, permission = permission.permission
55
}] if param.acl != null
66
])
77
}
88

9-
resource "databricks_cluster_policy" "this" {
10-
for_each = {
11-
for param in var.custom_cluster_policies : (param.name) => param.definition
12-
if param.definition != null
13-
}
14-
15-
name = each.key
16-
definition = jsonencode(each.value)
17-
}
18-
199
resource "databricks_permissions" "clusters" {
2010
for_each = {
2111
for v in var.clusters : (v.cluster_name) => v
@@ -53,7 +43,7 @@ resource "databricks_permissions" "sql_endpoint" {
5343
resource "databricks_secret_acl" "this" {
5444
for_each = { for entry in local.secrets_acl_objects_list : "${entry.scope}.${entry.principal}.${entry.permission}" => entry }
5545

56-
scope = each.value.scope
46+
scope = databricks_secret_scope.this[each.value.scope].name
5747
principal = databricks_group.this[each.value.principal].display_name
5848
permission = each.value.permission
5949
}

0 commit comments

Comments
 (0)