The main idea behind this module is to deploy resources for Databricks Workspace.

Here we provide some examples of how to provision it with different options.

### The example below covers the following features of the module:
1. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
2. Workspace IP Access list creation
3. ADLS Gen2 Mount
4. Create Secret Scope and assign permissions to custom groups
5. SQL Endpoint creation and configuration
6. Create Cluster policy
7. Create an Azure Key Vault-backed secret scope

```hcl
# Prerequisite resources

variable "databricks_account_id" {}
variable "cloud_name" {} # cloud provider (e.g., aws, azure)

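# NOTE: example placeholder values for these variables, supplied e.g. via
# terraform.tfvars (both values below are illustrative, not real):
#   databricks_account_id = "00000000-0000-0000-0000-000000000000"
#   cloud_name            = "azure"
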
# Databricks Workspace with Premium SKU
data "azurerm_databricks_workspace" "example" {
  name                = "example-workspace"
  resource_group_name = "example-rg"
}

# Databricks Workspace ID for AWS
data "databricks_mws_workspaces" "example" {
  account_id = var.databricks_account_id
}

# Databricks Provider configuration
provider "databricks" {
  alias                       = "main"
  host                        = data.azurerm_databricks_workspace.example.workspace_url
  azure_workspace_resource_id = data.azurerm_databricks_workspace.example.id
}

# Databricks Account-Level Provider configuration
provider "databricks" {
  alias      = "account"
  host       = "https://accounts.azuredatabricks.net"
  account_id = var.databricks_account_id
}

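# NOTE: credentials for the account-level provider are assumed to come from the
# environment (for example, Azure CLI authentication of an account admin);
# they are intentionally not hard-coded in this example.
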
# Key Vault where the Service Principal's secrets are stored. Used for mounting the Storage Container
data "azurerm_key_vault" "example" {
  name                = "example-key-vault"
  resource_group_name = "example-rg"
}

locals {
  databricks_iam_account_groups = [{
    group_name   = "example-gn"
    permissions  = ["ADMIN"]
    entitlements = [
      "allow_instance_pool_create",
      "allow_cluster_create",
      "databricks_sql_access"
    ]
  }]
}

# Assigns Databricks Account groups to the Workspace. A Unity Catalog Metastore must be assigned to the Workspace before Account groups can be assigned
module "databricks_account_groups" {
  count   = length(local.databricks_iam_account_groups) != 0 ? 1 : 0
  source  = "data-platform-hq/databricks-account-groups/databricks"
  version = "1.0.1"

  workspace_id               = var.cloud_name == "azure" ? data.azurerm_databricks_workspace.example.id : data.databricks_mws_workspaces.example.workspaces[0].workspace_id
  workspace_group_assignment = local.databricks_iam_account_groups

  providers = {
    databricks = databricks.account
  }
}

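# NOTE: a minimal sketch of the Unity Catalog Metastore assignment mentioned
# above, assuming an existing Metastore (the metastore ID is a placeholder):
resource "databricks_metastore_assignment" "example" {
  provider     = databricks.account
  metastore_id = "11111111-2222-3333-4444-555555555555" # placeholder; use your Metastore's ID
  workspace_id = data.azurerm_databricks_workspace.example.workspace_id # numeric workspace ID (Azure case)
}
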
# Example usage of the module for Runtime Premium resources.
module "databricks_runtime_premium" {
  source  = "data-platform-hq/runtime/databricks"
  version = "~>1.0"

  project  = "datahq"
  env      = "example"
  location = "eastus"

  # Cloud provider
  cloud_name = var.cloud_name

  # Example configuration for Workspace Groups
  iam_workspace_groups = {
    dev = {
      user = [
        # workspace user emails omitted for brevity, e.g. "user1@example.com"
      ]
    }
  }

  # Example configuration for Account Groups
  iam_account_groups = local.databricks_iam_account_groups

  # 1. Databricks clusters configuration, with permissions assigned to a custom group on the clusters.
  databricks_cluster_configs = [{
    cluster_name       = "Unity Catalog"
    data_security_mode = "USER_ISOLATION"
    # other cluster settings omitted for brevity
    permissions = [{ group_name = "DEVELOPERS", permission_level = "CAN_MANAGE" }]
  }]

  # 2. Workspace can be accessed only from these IP addresses:
  ip_rules = {
    "ip_range_1" = "10.128.0.0/16",
    "ip_range_2" = "10.33.0.0/16",
  }

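  # NOTE: once this allow list is applied, requests to the workspace from any
  # IP address outside these CIDR ranges are rejected.
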
  # 3. ADLS Gen2 Mount
  mountpoints = {
    storage_account_name = data.azurerm_storage_account.example.name
    container_name       = "example_container"
  }

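  # NOTE: after apply, the container contents are typically reachable inside the
  # workspace under a DBFS mount point such as dbfs:/mnt/example_container
  # (the exact mount prefix depends on the module's defaults).
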
  # Parameters of the Service Principal used for the ADLS mount
  # (App ID and Secret of the Service Principal are imported from the target Key Vault)
  sp_client_id_secret_name = "sp-client-id"        # name of the secret that stores the Service Principal App ID
  sp_key_secret_name       = "sp-key"              # name of the secret that stores the Service Principal Secret Key
  tenant_id_secret_name    = "infra-arm-tenant-id" # name of the secret that stores the Tenant ID value

  # 4. Create Secret Scope and assign permissions to custom groups
  secret_scope = [{
    scope_name = "extra-scope"
    acl        = [{ principal = "DEVELOPERS", permission = "READ" }] # Only custom workspace group names are allowed. If left empty, only Workspace admins can access these keys
    secrets    = [{ key = "secret-name", string_value = "secret-value" }]
  }]

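  # NOTE: once created, the secret above can be read back in workspace code via
  # dbutils.secrets.get(scope = "extra-scope", key = "secret-name"), or
  # referenced in cluster Spark configuration as {{secrets/extra-scope/secret-name}}.
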
  # 5. SQL Warehouse Endpoint
  databricks_sql_endpoint = [{
    name                      = "default"
    enable_serverless_compute = true
    permissions               = [{ group_name = "DEVELOPERS", permission_level = "CAN_USE" }]
  }]

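  # NOTE: enable_serverless_compute assumes serverless SQL warehouses are
  # available and enabled for this workspace's region and account tier.
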
  # 6. Databricks cluster policies
  custom_cluster_policies = [{
    name    = "custom_policy_1",
    can_use = "DEVELOPERS", # custom workspace group name that is allowed to use this policy
    definition = {
      # policy definition rules omitted for brevity
    }
  }]

  # 7. Azure Key Vault-backed secret scope
  key_vault_secret_scope = [{
    name         = "external"
    key_vault_id = data.azurerm_key_vault.example.id