Commit b7c3c18

dmytro_velychko3 committed
feat: refactor
1 parent 0a3a93d commit b7c3c18

File tree: 5 files changed (+170, -14 lines)

iam.tf

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
+locals {
+  admin_user_map = var.workspace_admins.user == null ? {} : {
+    for user in var.workspace_admins.user : "user.${user}" => user if user != null
+  }
+
+  admin_sp_map = var.workspace_admins.service_principal == null ? {} : {
+    for sp in var.workspace_admins.service_principal : "service_principal.${sp}" => sp if sp != null
+  }
+
+  members_object_list = concat(
+    flatten([for group, params in var.iam : [
+      for pair in setproduct([group], params.user) : {
+        type = "user", group = pair[0], member = pair[1]
+      }] if params.user != null
+    ]),
+    flatten([for group, params in var.iam : [
+      for pair in setproduct([group], params.service_principal) : {
+        type = "service_principal", group = pair[0], member = pair[1]
+      }] if params.service_principal != null
+    ])
+  )
+}
+
+data "databricks_group" "admin" {
+  display_name = "admins"
+}
+
+resource "databricks_group" "this" {
+  for_each = toset(keys(var.iam))
+
+  display_name = each.key
+  lifecycle { ignore_changes = [external_id, allow_cluster_create, allow_instance_pool_create, databricks_sql_access, workspace_access] }
+}
+
+resource "databricks_user" "this" {
+  for_each = toset(flatten(concat(
+    values({ for group, member in var.iam : group => member.user if member.user != null }),
+    values(local.admin_user_map)
+  )))
+
+  user_name = each.key
+  lifecycle { ignore_changes = [external_id, allow_cluster_create, allow_instance_pool_create, databricks_sql_access, workspace_access] }
+}
+
+resource "databricks_service_principal" "this" {
+  for_each = toset(flatten(concat(
+    values({ for group, member in var.iam : group => member.service_principal if member.service_principal != null }),
+    values(local.admin_sp_map)
+  )))
+
+  display_name   = each.key
+  application_id = lookup(var.user_object_ids, each.value)
+  lifecycle { ignore_changes = [external_id, allow_cluster_create, allow_instance_pool_create, databricks_sql_access, workspace_access] }
+}
+
+resource "databricks_group_member" "admin" {
+  for_each = merge(local.admin_user_map, local.admin_sp_map)
+
+  group_id  = data.databricks_group.admin.id
+  member_id = startswith(each.key, "user") ? databricks_user.this[each.value].id : databricks_service_principal.this[each.value].id
+}
+
+resource "databricks_group_member" "this" {
+  for_each = {
+    for entry in local.members_object_list : "${entry.type}.${entry.group}.${entry.member}" => entry
+  }
+
+  group_id  = databricks_group.this[each.value.group].id
+  member_id = startswith(each.key, "user") ? databricks_user.this[each.value.member].id : databricks_service_principal.this[each.value.member].id
+}
+
+resource "databricks_entitlements" "this" {
+  for_each = {
+    for group, params in var.iam : group => params
+  }
+
+  group_id                   = databricks_group.this[each.key].id
+  allow_cluster_create       = contains(coalesce(each.value.entitlements, ["none"]), "allow_cluster_create")
+  allow_instance_pool_create = contains(coalesce(each.value.entitlements, ["none"]), "allow_instance_pool_create")
+  databricks_sql_access      = contains(coalesce(each.value.entitlements, ["none"]), "databricks_sql_access")
+  workspace_access           = true
+
+  depends_on = [databricks_group_member.this]
+}
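
The new iam.tf is driven entirely by the iam, workspace_admins and user_object_ids variables added in variables.tf. A minimal sketch of module input that would exercise it (every user name, group name and ID below is hypothetical, not taken from this commit):

# terraform.tfvars (illustrative only)
workspace_admins = {
  user              = ["admin@example.com"]
  service_principal = ["deploy-sp"]
}

iam = {
  "data-engineers" = {
    user         = ["alice@example.com", "bob@example.com"]
    entitlements = ["databricks_sql_access", "allow_cluster_create"]
  }
  "analysts" = {
    service_principal = ["reporting-sp"]
    entitlements      = ["databricks_sql_access"]
  }
}

# Service principal names must also appear here, because
# databricks_service_principal.this sets application_id = lookup(var.user_object_ids, each.value).
user_object_ids = {
  "deploy-sp"    = "00000000-0000-0000-0000-000000000001"
  "reporting-sp" = "00000000-0000-0000-0000-000000000002"
}

With this input, databricks_group.this creates the two groups, databricks_group_member.this attaches the listed members under keys like "user.data-engineers.alice@example.com", and databricks_group_member.admin adds the admin user and service principal to the built-in admins group.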

main.tf

Lines changed: 36 additions & 11 deletions
@@ -1,3 +1,9 @@
+/* Premium
+locals {
+  ip_rules = var.ip_rules == null ? null : values(var.ip_rules)
+}
+*/
+
 data "azurerm_key_vault_secret" "sp_client_id" {
   name         = var.sp_client_id_secret_name
   key_vault_id = var.key_vault_id
@@ -13,18 +19,19 @@ data "azurerm_key_vault_secret" "tenant_id" {
   key_vault_id = var.key_vault_id
 }

-resource "databricks_token" "pat" {
+resource "databricks_token" "pat" { #
   comment          = "Terraform Provisioning"
   lifetime_seconds = var.pat_token_lifetime_seconds
 }

-resource "databricks_user" "this" {
-  for_each  = var.sku == "premium" ? [] : toset(var.users)
-  user_name = each.value
-  lifecycle { ignore_changes = [external_id] }
-}
+#resource "databricks_user" "this" { # Only for 'Standard' SKU type
+#  #for_each = var.sku == "premium" ? [] : toset(var.users)
+#  for_each  = toset(var.users)
+#  user_name = each.value
+#  lifecycle { ignore_changes = [external_id] }
+#}

-resource "azurerm_role_assignment" "this" {
+resource "azurerm_role_assignment" "this" { ###
   for_each = {
     for permission in var.permissions : "${permission.object_id}-${permission.role}" => permission
     if permission.role != null
@@ -35,10 +42,11 @@ resource "azurerm_role_assignment" "this" {
 }

 resource "databricks_cluster_policy" "this" {
-  for_each = var.sku == "premium" ? {
+  #for_each = var.sku == "premium" ? {
+  for_each = {
     for param in var.custom_cluster_policies : (param.name) => param.definition
     if param.definition != null
-  } : {}
+  } # : {}

   name       = each.key
   definition = jsonencode(each.value)
@@ -50,8 +58,6 @@ resource "databricks_cluster" "this" {
   spark_conf     = var.spark_conf
   spark_env_vars = var.spark_env_vars

-  policy_id = var.sku == "premium" ? one([for policy in var.custom_cluster_policies : databricks_cluster_policy.this[policy.name].id if policy.assigned]) : null
-
   data_security_mode      = var.data_security_mode
   node_type_id            = var.node_type
   autotermination_minutes = var.autotermination_minutes
@@ -86,3 +92,22 @@ resource "databricks_cluster" "this" {
     }
   }
 }
+/* Premium
+resource "databricks_workspace_conf" "this" {
+  count = local.ip_rules == null ? 0 : 1
+
+  custom_config = {
+    "enableIpAccessLists" : true
+  }
+}
+
+resource "databricks_ip_access_list" "this" {
+  count = local.ip_rules == null ? 0 : 1
+
+  label        = "allow_in"
+  list_type    = "ALLOW"
+  ip_addresses = local.ip_rules
+
+  depends_on = [databricks_workspace_conf.this]
+}
+*/
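
The Premium blocks above remain commented out in this commit. If they were re-enabled, the matching ip_rules variable (also commented out in variables.tf) would feed databricks_ip_access_list via local.ip_rules; a purely illustrative input might look like:

ip_rules = {
  office = "203.0.113.0/24"
  vpn_gw = "198.51.100.10"
}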

outputs.tf

Lines changed: 2 additions & 1 deletion
@@ -16,11 +16,12 @@ output "cluster_policies_object" {
   } if policy.definition != null && var.sku == "premium"]
   description = "Databricks Cluster Policies object map"
 }
-
+/*
 output "secret_scope_object" {
   value = [for param in var.secret_scope : {
     scope_name = databricks_secret_scope.this[param.scope_name].name
     acl        = param.acl
   } if param.acl != null]
   description = "Databricks-managed Secret Scope object map to create ACLs"
 }
+*/

secrets.tf

Lines changed: 3 additions & 2 deletions
@@ -1,3 +1,4 @@
+
 locals {
   sp_secrets = {
     (var.sp_client_id_secret_name) = { value = data.azurerm_key_vault_secret.sp_client_id.value }
@@ -14,7 +15,7 @@ locals {
 # Secret Scope with SP secrets for mounting Azure Data Lake Storage
 resource "databricks_secret_scope" "main" {
   name = "main"
-  initial_manage_principal = var.sku == "premium" ? null : "users"
+  initial_manage_principal = "users" #var.sku == "premium" ? null : "users"
 }

 resource "databricks_secret" "main" {
@@ -33,7 +34,7 @@ resource "databricks_secret_scope" "this" {
   }

   name = each.key
-  initial_manage_principal = var.sku == "premium" ? null : "users"
+  initial_manage_principal = "users"
 }

 resource "databricks_secret" "this" {

variables.tf

Lines changed: 45 additions & 0 deletions
@@ -216,3 +216,48 @@ EOT
 # dns_name = null
 # }
 #}
+
+# Identity Access Management variables
+variable "user_object_ids" {
+  type        = map(string)
+  description = "Map of AD usernames and corresponding object IDs"
+  default     = {}
+}
+
+variable "workspace_admins" {
+  type = object({
+    user              = list(string)
+    service_principal = list(string)
+  })
+  description = "Provide users or service principals to grant them Admin permissions in Workspace."
+  default = {
+    user              = null
+    service_principal = null
+  }
+}
+
+variable "iam" {
+  type = map(object({
+    user                       = optional(list(string))
+    service_principal          = optional(list(string))
+    entitlements               = optional(list(string))
+    default_cluster_permission = optional(string)
+  }))
+  description = "Used to create workspace group. Map of group name and its parameters, such as users and service principals added to the group. Also possible to configure group entitlements."
+  default     = {}
+
+  validation {
+    condition = length([for item in values(var.iam)[*] : item.entitlements if item.entitlements != null]) != 0 ? alltrue([
+      for entry in flatten(values(var.iam)[*].entitlements) : contains(["allow_cluster_create", "allow_instance_pool_create", "databricks_sql_access"], entry) if entry != null
+    ]) : true
+    error_message = "Entitlements validation. The only suitable values are: databricks_sql_access, allow_instance_pool_create, allow_cluster_create"
+  }
+}
+
+/* Premium
+variable "ip_rules" {
+  type        = map(string)
+  description = "Map of IP addresses permitted for access to DB"
+  default     = {}
+}
+*/
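
The validation block on var.iam only constrains entitlement names, and only when at least one group actually sets entitlements. A quick sketch of what it accepts and rejects (group names are hypothetical):

# Accepted: every entry is one of the three allowed entitlements.
iam = {
  "developers" = { entitlements = ["allow_cluster_create", "databricks_sql_access"] }
}

# Rejected at plan time with the error message above, because "workspace_access"
# is not in the allowed list (iam.tf hard-codes workspace_access = true instead).
#iam = {
#  "developers" = { entitlements = ["workspace_access"] }
#}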
