
Commit 2ba8a6e

Merge pull request #34 from data-platform-hq/feat/888-optional-mount
feat: optional resources for mount
2 parents 9a7c77d + a5a02b5 commit 2ba8a6e

File tree

4 files changed: +63 -59 lines changed
main.tf

Lines changed: 0 additions & 15 deletions
@@ -3,21 +3,6 @@ locals {
   suffix = length(var.suffix) == 0 ? "" : "-${var.suffix}"
 }
 
-data "azurerm_key_vault_secret" "sp_client_id" {
-  name         = var.sp_client_id_secret_name
-  key_vault_id = var.key_vault_id
-}
-
-data "azurerm_key_vault_secret" "sp_key" {
-  name         = var.sp_key_secret_name
-  key_vault_id = var.key_vault_id
-}
-
-data "azurerm_key_vault_secret" "tenant_id" {
-  name         = var.tenant_id_secret_name
-  key_vault_id = var.key_vault_id
-}
-
 resource "databricks_workspace_conf" "this" {
   count = local.ip_rules == null ? 0 : 1
 
mount.tf

Lines changed: 4 additions & 6 deletions
@@ -1,5 +1,5 @@
 resource "databricks_mount" "adls" {
-  for_each = var.mountpoints
+  for_each = var.mount_enabled ? var.mountpoints : {}
 
   name       = each.key
   cluster_id = var.mount_cluster_name != null ? databricks_cluster.cluster[var.mount_cluster_name].id : null
@@ -10,11 +10,9 @@ resource "databricks_mount" "adls" {
   } : {
     "fs.azure.account.auth.type" : "OAuth",
     "fs.azure.account.oauth.provider.type" : "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
-    "fs.azure.account.oauth2.client.id" : data.azurerm_key_vault_secret.sp_client_id.value,
-    "fs.azure.account.oauth2.client.secret" : databricks_secret.main[data.azurerm_key_vault_secret.sp_key.name].config_reference,
-    "fs.azure.account.oauth2.client.endpoint" : "https://login.microsoftonline.com/${data.azurerm_key_vault_secret.tenant_id.value}/oauth2/token",
+    "fs.azure.account.oauth2.client.id" : var.mount_service_principal_client_id,
+    "fs.azure.account.oauth2.client.secret" : databricks_secret.main["mount-sp-secret"].config_reference,
+    "fs.azure.account.oauth2.client.endpoint" : "https://login.microsoftonline.com/${var.mount_service_principal_tenant_id}/oauth2/token",
     "fs.azure.createRemoteFileSystemDuringInitialization" : "false",
-    "spark.databricks.sqldw.jdbc.service.principal.client.id" : data.azurerm_key_vault_secret.sp_client_id.value,
-    "spark.databricks.sqldw.jdbc.service.principal.client.secret" : databricks_secret.main[data.azurerm_key_vault_secret.sp_key.name].config_reference
   }
 }
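
For reference, a minimal sketch of the gating pattern used above (variable and local names here are illustrative only, not part of the module): when mount_enabled is false the ternary hands for_each an empty map, so Terraform plans zero mount resources; when it is true, one mount is planned per mountpoints entry.

variable "mount_enabled" {
  type    = bool
  default = false
}

variable "mountpoints" {
  type    = map(object({ storage_account_name = string, container_name = string }))
  default = {}
}

locals {
  # With mount_enabled = false this evaluates to {}, so any resource using
  # for_each = local.effective_mountpoints is planned zero times.
  effective_mountpoints = var.mount_enabled ? var.mountpoints : {}
}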

secrets.tf

Lines changed: 16 additions & 7 deletions
@@ -1,7 +1,7 @@
 locals {
-  sp_secrets = {
-    (var.sp_client_id_secret_name) = { value = data.azurerm_key_vault_secret.sp_client_id.value }
-    (var.sp_key_secret_name)       = { value = data.azurerm_key_vault_secret.sp_key.value }
+  mount_sp_secrets = {
+    mount-sp-client-id = { value = var.mount_service_principal_client_id }
+    mount-sp-secret    = { value = var.mount_service_principal_secret }
   }
 
   secrets_objects_list = flatten([for param in var.secret_scope : [
@@ -13,16 +13,25 @@ locals {
 
 # Secret Scope with SP secrets for mounting Azure Data Lake Storage
 resource "databricks_secret_scope" "main" {
+  count = var.mount_enabled ? 1 : 0
+
   name                     = "main"
   initial_manage_principal = null
 }
 
 resource "databricks_secret" "main" {
-  for_each = local.sp_secrets
+  for_each = var.mount_enabled ? local.mount_sp_secrets : {}
 
   key          = each.key
   string_value = each.value["value"]
-  scope        = databricks_secret_scope.main.id
+  scope        = databricks_secret_scope.main[0].id
+
+  lifecycle {
+    precondition {
+      condition     = var.mount_enabled ? length(compact([var.mount_service_principal_client_id, var.mount_service_principal_secret, var.mount_service_principal_tenant_id])) == 3 : true
+      error_message = "To mount ADLS Storage, please provide prerequisite Service Principal values - 'mount_service_principal_object_id', 'mount_service_principal_secret', 'mount_service_principal_tenant_id'."
+    }
+  }
 }
 
 # Custom additional Databricks Secret Scope
@@ -52,8 +61,8 @@ resource "azurerm_key_vault_access_policy" "databricks" {
   } : {}
 
   key_vault_id = each.value.key_vault_id
-  object_id    = "9b38785a-6e08-4087-a0c4-20634343f21f" # Global 'AzureDatabricks' SP object id
-  tenant_id    = data.azurerm_key_vault_secret.tenant_id.value
+  object_id    = var.global_databricks_sp_object_id
+  tenant_id    = each.value.tenant_id
 
   secret_permissions = [
     "Get",

variables.tf

Lines changed: 43 additions & 31 deletions
@@ -76,16 +76,6 @@ variable "sql_endpoint" {
   default = []
 }
 
-variable "sp_client_id_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains ClientID of Service Principal to access in Azure Key Vault"
-}
-
-variable "sp_key_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains client secret of Service Principal to access in Azure Key Vault"
-}
-
 # Secret Scope variables
 variable "secret_scope" {
   type = list(object({
@@ -112,12 +102,13 @@ EOT
   }]
 }
 
-variable "key_vault_id" {
+# Azure Key Vault-backed Secret Scope
+variable "global_databricks_sp_object_id" {
   type        = string
-  description = "ID of the Key Vault instance where the Secret resides"
+  description = "Global 'AzureDatabricks' SP object id. Used to create Key Vault Access Policy for Secret Scope"
+  default     = "9b38785a-6e08-4087-a0c4-20634343f21f"
 }
 
-# Azure Key Vault-backed Secret Scope
 variable "create_databricks_access_policy_to_key_vault" {
   type        = bool
   description = "Boolean flag to enable creation of Key Vault Access Policy for Databricks Global Service Principal."
@@ -126,28 +117,15 @@ variable "create_databricks_access_policy_to_key_vault" {
 
 variable "key_vault_secret_scope" {
   type = list(object({
-    name         = optional(string)
-    key_vault_id = optional(string)
-    dns_name     = optional(string)
+    name         = string
+    key_vault_id = string
+    dns_name     = string
+    tenant_id    = string
   }))
   description = "Object with Azure Key Vault parameters required for creation of Azure-backed Databricks Secret scope"
   default     = []
 }
 
-variable "tenant_id_secret_name" {
-  type        = string
-  description = "The name of Azure Key Vault secret that contains tenant ID secret of Service Principal to access in Azure Key Vault"
-}
-
-variable "mountpoints" {
-  type = map(object({
-    storage_account_name = string
-    container_name       = string
-  }))
-  description = "Mountpoints for databricks"
-  default     = {}
-}
-
 variable "custom_cluster_policies" {
   type = list(object({
     name = string
@@ -199,9 +177,43 @@ variable "pat_token_lifetime_seconds" {
   default = 315569520
 }
 
+# Mount ADLS Gen2 Filesystem
+variable "mount_enabled" {
+  type        = bool
+  description = "Boolean flag that determines whether mount point for storage account filesystem is created"
+  default     = false
+}
+
+variable "mount_service_principal_client_id" {
+  type        = string
+  description = "Application(client) Id of Service Principal used to perform storage account mounting"
+  default     = null
+}
+variable "mount_service_principal_secret" {
+  type        = string
+  description = "Service Principal Secret used to perform storage account mounting"
+  default     = null
+  sensitive   = true
+}
+
+variable "mount_service_principal_tenant_id" {
+  type        = string
+  description = "Service Principal tenant id used to perform storage account mounting"
+  default     = null
+}
+
+variable "mountpoints" {
+  type = map(object({
+    storage_account_name = string
+    container_name       = string
+  }))
+  description = "Mountpoints for databricks"
+  default     = {}
+}
+
 variable "mount_adls_passthrough" {
   type        = bool
-  description = "Boolean flag to use mount options for credentals passthrough. Should be used with mount_cluster_name, specified cluster should have option cluster_conf_passthrought == true"
+  description = "Boolean flag to use mount options for credentials passthrough. Should be used with mount_cluster_name, specified cluster should have option cluster_conf_passthrought == true"
   default = false
 }
 
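A minimal caller-side sketch of the new optional-mount interface (the module source path and all values below are placeholders, not taken from this commit; the module's other required inputs are omitted):

module "databricks_runtime" {
  source = "../databricks-runtime" # placeholder path

  # Mounting is off by default. Enabling it requires all three service
  # principal values, otherwise the precondition in secrets.tf fails the plan.
  mount_enabled                     = true
  mount_service_principal_client_id = "00000000-0000-0000-0000-000000000000"
  mount_service_principal_secret    = var.mount_sp_secret # placeholder reference
  mount_service_principal_tenant_id = "11111111-1111-1111-1111-111111111111"

  mountpoints = {
    data = {
      storage_account_name = "examplestorageacct"
      container_name       = "data"
    }
  }
}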
