Skip to content

Commit a14660b

Browse files
authored
Merge pull request #4 from data-platform-hq/feat-cluster-access-mode
feat: unity catalog cluster updates
2 parents e9208bd + 04699c5 commit a14660b

File tree

6 files changed

+51
-37
lines changed

6 files changed

+51
-37
lines changed

README.md

Lines changed: 25 additions & 24 deletions
Large diffs are not rendered by default.

main.tf

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -45,6 +45,7 @@ resource "databricks_cluster" "this" {
4545
cluster_name = "shared autoscaling"
4646
spark_version = var.spark_version
4747

48+
data_security_mode = var.data_security_mode
4849
node_type_id = var.node_type
4950
autotermination_minutes = var.autotermination_minutes
5051

@@ -63,5 +64,9 @@ resource "databricks_cluster" "this" {
6364
ignore_changes = [
6465
state
6566
]
67+
precondition {
68+
condition = var.data_security_mode == "USER_ISOLATION" ? contains(["11.3.x-scala2.12", "12.0.x-scala2.12"], var.spark_version) : true
69+
error_message = "When USER_ISOLATION is selected, please set spark version to be either one of these values: '11.3.x-scala2.12', '12.0.x-scala2.12'"
70+
}
6671
}
6772
}

mount.tf

Lines changed: 9 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -1,19 +1,21 @@
1+
locals {
2+
secret_scope_name = var.use_local_secret_scope ? databricks_secret_scope.this[0].name : "main"
3+
mount_secret_name = var.use_local_secret_scope ? databricks_secret.this[var.sp_key_secret_name].config_reference : "{{secrets/${local.secret_scope_name}/${data.azurerm_key_vault_secret.sp_key.name}}}"
4+
}
5+
16
resource "databricks_mount" "adls" {
27
for_each = var.mountpoints
38

4-
cluster_id = databricks_cluster.this.id
5-
name = each.key
6-
uri = "abfss://${each.value["container_name"]}@${each.value["storage_account_name"]}.dfs.core.windows.net/${each.value["root_path"]}"
9+
name = each.key
10+
uri = "abfss://${each.value["container_name"]}@${each.value["storage_account_name"]}.dfs.core.windows.net/${each.value["root_path"]}"
711
extra_configs = {
812
"fs.azure.account.auth.type" : "OAuth",
913
"fs.azure.account.oauth.provider.type" : "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
1014
"fs.azure.account.oauth2.client.id" : data.azurerm_key_vault_secret.sp_client_id.value,
11-
"fs.azure.account.oauth2.client.secret" : "{{secrets/${local.secret_scope_name}/${data.azurerm_key_vault_secret.sp_key.name}}}",
12-
"fs.azure.account.oauth2.client.secret" : "{{secrets/${local.secret_scope_name}/${local.mount_secret_name}}}",
15+
"fs.azure.account.oauth2.client.secret" : local.mount_secret_name,
1316
"fs.azure.account.oauth2.client.endpoint" : "https://login.microsoftonline.com/${data.azurerm_key_vault_secret.tenant_id.value}/oauth2/token",
1417
"fs.azure.createRemoteFileSystemDuringInitialization" : "false",
1518
"spark.databricks.sqldw.jdbc.service.principal.client.id" : data.azurerm_key_vault_secret.sp_client_id.value,
16-
"spark.databricks.sqldw.jdbc.service.principal.client.secret" : "{{secrets/${local.secret_scope_name}/${data.azurerm_key_vault_secret.sp_key.name}}}",
17-
"spark.databricks.sqldw.jdbc.service.principal.client.secret" : "{{secrets/${local.secret_scope_name}/${local.mount_secret_name}}}",
19+
"spark.databricks.sqldw.jdbc.service.principal.client.secret" : local.mount_secret_name,
1820
}
1921
}

secrets.tf

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,3 @@
1-
locals {
2-
secret_scope_name = var.use_local_secret_scope ? databricks_secret_scope.this[0].name : "main"
3-
mount_secret_name = var.use_local_secret_scope ? databricks_secret.this[var.sp_key_secret_name].key : data.azurerm_key_vault_secret.sp_key.name
4-
}
51
resource "databricks_secret_scope" "this" {
62
count = var.use_local_secret_scope ? 1 : 0
73

variables.tf

Lines changed: 11 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -101,10 +101,20 @@ variable "permissions" {
101101
]
102102
}
103103

104+
variable "data_security_mode" {
105+
type = string
106+
description = "Security features of the cluster"
107+
default = "USER_ISOLATION"
108+
validation {
109+
condition = contains(["SINGLE_USER", "USER_ISOLATION", "NONE"], var.data_security_mode)
110+
error_message = "Catalog Access mode must be either 'SINGLE_USER', 'USER_ISOLATION' or 'NONE' value"
111+
}
112+
}
113+
104114
variable "spark_version" {
105115
type = string
106116
description = "Runtime version"
107-
default = "9.1.x-scala2.12"
117+
default = "11.3.x-scala2.12"
108118
}
109119

110120
variable "node_type" {

versions.tf

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,7 @@ terraform {
88
}
99
databricks = {
1010
source = "databricks/databricks"
11-
version = ">=1.4.0"
11+
version = ">=1.8.0"
1212
}
1313
}
1414
}

0 commit comments

Comments (0)