@@ -9,14 +9,13 @@ The main idea behind this module is to deploy resources for Databricks Workspace
Here we provide some examples of how to provision it with different options.

### The example below covers the following features of this module:
- 1. Permissions and entitlements for users and groups within Databricks
- 2. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
- 3. Workspace IP Access list creation
- 4. ADLS Gen2 Mount
- 5. Create Secret Scope and assign permissions to custom groups
- 6. SQL Endpoint creation and configuration
- 7. Create Cluster policy
- 8. Create an Azure Key Vault-backed secret scope
+ 1. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
+ 2. Workspace IP Access list creation
+ 3. ADLS Gen2 Mount
+ 4. Create Secret Scope and assign permissions to custom groups
+ 5. SQL Endpoint creation and configuration
+ 6. Create Cluster policy
+ 7. Create an Azure Key Vault-backed secret scope

```hcl
# Prerequisite resources
@@ -58,29 +57,7 @@ module "databricks_runtime_premium" {
  # Cloud provider
  cloud_name = "azure" # cloud provider (e.g., aws, azure)

-   # 1. Permissions and entitlements for users and groups within Databricks
-   iam_workspace_groups = {
-     dev = {
-       user = [
-         "user1@example.com",
-         "user2@example.com"
-       ]
-       service_principal = []
-       entitlements      = ["allow_instance_pool_create", "allow_cluster_create", "databricks_sql_access"]
-     }
-   }
-
-   iam_account_groups = [{
-     group_name   = "example-gn"
-     permissions  = ["ADMIN"]
-     entitlements = [
-       "allow_instance_pool_create",
-       "allow_cluster_create",
-       "databricks_sql_access"
-     ]
-   }]
-
-   # 2. Databricks clusters configuration, and permission assignment to a custom group on the clusters.
+   # 1. Databricks clusters configuration, and permission assignment to a custom group on the clusters.
  databricks_cluster_configs = [{
    cluster_name       = "Unity Catalog"
    data_security_mode = "USER_ISOLATION"
@@ -96,33 +73,33 @@ module "databricks_runtime_premium" {
    permissions = [{ group_name = "DEVELOPERS", permission_level = "CAN_MANAGE" }]
  }]

-   # 3. The workspace can be accessed only from these IP addresses:
+   # 2. The workspace can be accessed only from these IP addresses:
  ip_rules = {
    "ip_range_1" = "10.128.0.0/16",
    "ip_range_2" = "10.33.0.0/16",
  }

-   # 4. ADLS Gen2 Mount
+   # 3. ADLS Gen2 Mount
  mountpoints = {
    storage_account_name = data.azurerm_storage_account.example.name
    container_name       = "example_container"
  }

-   # 5. Create Secret Scope and assign permissions to custom groups
+   # 4. Create Secret Scope and assign permissions to custom groups
  secret_scope = [{
    scope_name = "extra-scope"
    acl        = [{ principal = "DEVELOPERS", permission = "READ" }] # Only custom workspace group names are allowed. If left empty, only workspace admins can access these secrets.
    secrets    = [{ key = "secret-name", string_value = "secret-value" }]
  }]

-   # 6. SQL Warehouse Endpoint
+   # 5. SQL Warehouse Endpoint
  databricks_sql_endpoint = [{
    name                      = "default"
    enable_serverless_compute = true
    permissions               = [{ group_name = "DEVELOPERS", permission_level = "CAN_USE" }]
  }]

-   # 7. Databricks cluster policies
+   # 6. Databricks cluster policies
  custom_cluster_policies = [{
    name    = "custom_policy_1",
    can_use = "DEVELOPERS", # custom workspace group name that is allowed to use this policy
@@ -135,7 +112,7 @@ module "databricks_runtime_premium" {
    }
  }]

-   # 8. Azure Key Vault-backed secret scope
+   # 7. Azure Key Vault-backed secret scope
  key_vault_secret_scope = [{
    name         = "external"
    key_vault_id = data.azurerm_key_vault.example.id
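
The example above references `data.azurerm_storage_account.example` and `data.azurerm_key_vault.example`, which come from the `# Prerequisite resources` section that the hunks above truncate. A minimal sketch of what those lookups could look like is shown below; the storage account, key vault, and resource group names are placeholders rather than values taken from this module.

```hcl
# Hypothetical prerequisite data sources assumed by the example above;
# replace the placeholder names with your own existing Azure resources.
data "azurerm_storage_account" "example" {
  name                = "examplestorageaccount" # placeholder storage account name
  resource_group_name = "example-rg"            # placeholder resource group name
}

data "azurerm_key_vault" "example" {
  name                = "example-kv" # placeholder key vault name
  resource_group_name = "example-rg"
}
```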