Commit a5a02b5
feat: mount feature optionality
1 parent 499921b

4 files changed: +62 -70 lines

main.tf
Lines changed: 0 additions & 19 deletions

@@ -3,25 +3,6 @@ locals {
   suffix = length(var.suffix) == 0 ? "" : "-${var.suffix}"
 }
 
-data "azurerm_key_vault_secret" "sp_client_id" {
-  count = var.mount_enabled ? 1 : 0
-
-  name         = var.sp_client_id_secret_name
-  key_vault_id = var.key_vault_id
-}
-
-data "azurerm_key_vault_secret" "sp_key" {
-  count = var.mount_enabled ? 1 : 0
-
-  name         = var.sp_key_secret_name
-  key_vault_id = var.key_vault_id
-}
-
-data "azurerm_key_vault_secret" "tenant_id" {
-  name         = var.tenant_id_secret_name
-  key_vault_id = var.key_vault_id
-}
-
 resource "databricks_workspace_conf" "this" {
   count = local.ip_rules == null ? 0 : 1
 

mount.tf
Lines changed: 3 additions & 5 deletions

@@ -10,11 +10,9 @@ resource "databricks_mount" "adls" {
   } : {
     "fs.azure.account.auth.type" : "OAuth",
     "fs.azure.account.oauth.provider.type" : "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
-    "fs.azure.account.oauth2.client.id" : data.azurerm_key_vault_secret.sp_client_id[0].value,
-    "fs.azure.account.oauth2.client.secret" : databricks_secret.main[data.azurerm_key_vault_secret.sp_key[0].name].config_reference,
-    "fs.azure.account.oauth2.client.endpoint" : "https://login.microsoftonline.com/${data.azurerm_key_vault_secret.tenant_id.value}/oauth2/token",
+    "fs.azure.account.oauth2.client.id" : var.mount_service_principal_client_id,
+    "fs.azure.account.oauth2.client.secret" : databricks_secret.main["mount-sp-secret"].config_reference,
+    "fs.azure.account.oauth2.client.endpoint" : "https://login.microsoftonline.com/${var.mount_service_principal_tenant_id}/oauth2/token",
     "fs.azure.createRemoteFileSystemDuringInitialization" : "false",
-    "spark.databricks.sqldw.jdbc.service.principal.client.id" : data.azurerm_key_vault_secret.sp_client_id[0].value,
-    "spark.databricks.sqldw.jdbc.service.principal.client.secret" : databricks_secret.main[data.azurerm_key_vault_secret.sp_key[0].name].config_reference
   }
 }
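
Side note, not part of the diff: the config_reference attribute exported by the databricks_secret resource renders a secret placeholder of the form {{secrets/<scope>/<key>}}, so after this change the OAuth client secret is injected by reference rather than as a literal value in the mount configuration. Assuming the scope "main" and key "mount-sp-secret" created in secrets.tf, the effective entry would look roughly like this illustration:

# Illustration only - assuming scope "main" and key "mount-sp-secret" from secrets.tf:
# "fs.azure.account.oauth2.client.secret" : "{{secrets/main/mount-sp-secret}}"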

secrets.tf
Lines changed: 17 additions & 8 deletions

@@ -1,8 +1,8 @@
 locals {
-  sp_secrets = var.mount_enabled ? {
-    (var.sp_client_id_secret_name) = { value = data.azurerm_key_vault_secret.sp_client_id[0].value }
-    (var.sp_key_secret_name)       = { value = data.azurerm_key_vault_secret.sp_key[0].value }
-  } : {}
+  mount_sp_secrets = {
+    mount-sp-client-id = { value = var.mount_service_principal_client_id }
+    mount-sp-secret    = { value = var.mount_service_principal_secret }
+  }
 
   secrets_objects_list = flatten([for param in var.secret_scope : [
     for secret in param.secrets : {
@@ -13,16 +13,25 @@ locals {
 
 # Secret Scope with SP secrets for mounting Azure Data Lake Storage
 resource "databricks_secret_scope" "main" {
+  count = var.mount_enabled ? 1 : 0
+
   name                     = "main"
   initial_manage_principal = null
 }
 
 resource "databricks_secret" "main" {
-  for_each = local.sp_secrets
+  for_each = var.mount_enabled ? local.mount_sp_secrets : {}
 
   key          = each.key
   string_value = each.value["value"]
-  scope        = databricks_secret_scope.main.id
+  scope        = databricks_secret_scope.main[0].id
+
+  lifecycle {
+    precondition {
+      condition     = var.mount_enabled ? length(compact([var.mount_service_principal_client_id, var.mount_service_principal_secret, var.mount_service_principal_tenant_id])) == 3 : true
+      error_message = "To mount ADLS Storage, please provide prerequisite Service Principal values - 'mount_service_principal_object_id', 'mount_service_principal_secret', 'mount_service_principal_tenant_id'."
+    }
+  }
 }
 
 # Custom additional Databricks Secret Scope
@@ -52,8 +61,8 @@ resource "azurerm_key_vault_access_policy" "databricks" {
   } : {}
 
   key_vault_id = each.value.key_vault_id
-  object_id    = "9b38785a-6e08-4087-a0c4-20634343f21f" # Global 'AzureDatabricks' SP object id
-  tenant_id    = data.azurerm_key_vault_secret.tenant_id.value
+  object_id    = var.global_databricks_sp_object_id
+  tenant_id    = each.value.tenant_id
 
   secret_permissions = [
     "Get",

variables.tf
Lines changed: 42 additions & 38 deletions

@@ -76,18 +76,6 @@ variable "sql_endpoint" {
   default = []
 }
 
-variable "sp_client_id_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault"
-  default     = ""
-}
-
-variable "sp_key_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains client secret of Service Principal to access in Azure Key Vault"
-  default     = ""
-}
-
 # Secret Scope variables
 variable "secret_scope" {
   type = list(object({
@@ -114,12 +102,13 @@ EOT
   }]
 }
 
-variable "key_vault_id" {
+# Azure Key Vault-backed Secret Scope
+variable "global_databricks_sp_object_id" {
   type        = string
-  description = "ID of the Key Vault instance where the Secret resides"
+  description = "Global 'AzureDatabricks' SP object id. Used to create Key Vault Access Policy for Secret Scope"
+  default     = "9b38785a-6e08-4087-a0c4-20634343f21f"
 }
 
-# Azure Key Vault-backed Secret Scope
 variable "create_databricks_access_policy_to_key_vault" {
   type        = bool
   description = "Boolean flag to enable creation of Key Vault Access Policy for Databricks Global Service Principal."
@@ -128,28 +117,15 @@ variable "create_databricks_access_policy_to_key_vault" {
 
 variable "key_vault_secret_scope" {
   type = list(object({
-    name         = optional(string)
-    key_vault_id = optional(string)
-    dns_name     = optional(string)
+    name         = string
+    key_vault_id = string
+    dns_name     = string
+    tenant_id    = string
   }))
   description = "Object with Azure Key Vault parameters required for creation of Azure-backed Databricks Secret scope"
   default     = []
 }
 
-variable "tenant_id_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains tenant ID secret of Service Principal to access in Azure Key Vault"
-}
-
-variable "mountpoints" {
-  type = map(object({
-    storage_account_name = string
-    container_name       = string
-  }))
-  description = "Mountpoints for databricks"
-  default     = {}
-}
-
 variable "custom_cluster_policies" {
   type = list(object({
     name = string
@@ -201,20 +177,48 @@ variable "pat_token_lifetime_seconds" {
   default = 315569520
 }
 
-variable "mount_adls_passthrough" {
+# Mount ADLS Gen2 Filesystem
+variable "mount_enabled" {
   type        = bool
-  description = "Boolean flag to use mount options for credentals passthrough. Should be used with mount_cluster_name, specified cluster should have option cluster_conf_passthrought == true"
+  description = "Boolean flag that determines whether mount point for storage account filesystem is created"
   default     = false
 }
 
-variable "mount_cluster_name" {
+variable "mount_service_principal_client_id" {
   type        = string
-  description = "Name of the cluster that will be used during storage mounting. If mount_adls_passthrough == true, cluster should also have option cluster_conf_passthrought == true"
+  description = "Application(client) Id of Service Principal used to perform storage account mounting"
+  default     = null
+}
+variable "mount_service_principal_secret" {
+  type        = string
+  description = "Service Principal Secret used to perform storage account mounting"
   default     = null
+  sensitive   = true
 }
 
-variable "mount_enabled" {
+variable "mount_service_principal_tenant_id" {
+  type        = string
+  description = "Service Principal tenant id used to perform storage account mounting"
+  default     = null
+}
+
+variable "mountpoints" {
+  type = map(object({
+    storage_account_name = string
+    container_name       = string
+  }))
+  description = "Mountpoints for databricks"
+  default     = {}
+}
+
+variable "mount_adls_passthrough" {
   type        = bool
-  description = "Boolean flag to enable ADLS mount to Databricks"
+  description = "Boolean flag to use mount options for credentials passthrough. Should be used with mount_cluster_name, specified cluster should have option cluster_conf_passthrought == true"
   default     = false
 }
+
+variable "mount_cluster_name" {
+  type        = string
+  description = "Name of the cluster that will be used during storage mounting. If mount_adls_passthrough == true, cluster should also have option cluster_conf_passthrought == true"
+  default     = null
+}
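
For orientation, a minimal root-module call exercising the new optional-mount interface might look like the sketch below. The module source path, resource references, and literal values are hypothetical; only the variable names and shapes come from this commit.

module "databricks_runtime" {
  source = "../path/to/this/module" # hypothetical source path

  # Enable the optional mount feature and pass the Service Principal used for mounting.
  mount_enabled                     = true
  mount_service_principal_client_id = var.mount_sp_client_id # hypothetical caller variable
  mount_service_principal_secret    = var.mount_sp_secret    # hypothetical caller variable
  mount_service_principal_tenant_id = data.azurerm_client_config.current.tenant_id

  # Containers to mount; object shape matches the mountpoints variable above.
  mountpoints = {
    example = {
      storage_account_name = "examplestorageaccount" # hypothetical
      container_name       = "example"               # hypothetical
    }
  }

  # key_vault_secret_scope entries now require tenant_id as well.
  key_vault_secret_scope = [{
    name         = "external"
    key_vault_id = azurerm_key_vault.example.id        # hypothetical resource
    dns_name     = azurerm_key_vault.example.vault_uri # hypothetical resource
    tenant_id    = data.azurerm_client_config.current.tenant_id
  }]
}

Omitting any of the three mount_service_principal_* values while mount_enabled is true would trip the new precondition in secrets.tf at plan time.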
