Commit e1e619c

Author: dmytro_velychko3 (committed)
Merge branch 'main' into databricks-uniti-cluster
# Conflicts: # README.md
2 parents 01f9431 + 7e19d61 commit e1e619c

File tree

1 file changed: +254 -0 lines changed

README.md

Lines changed: 254 additions & 0 deletions

Terraform module used for management of Databricks Premium Resources

## Usage

### **Requires Workspace with "Premium" SKU**

The main idea behind this module is to deploy resources for a Databricks Workspace with the Premium SKU only.
Here we provide some examples of how to provision it with different options.
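
Both examples assume a standard Terraform setup with the `azurerm` and `databricks` providers declared. Below is a minimal sketch of such a configuration; the version constraints are illustrative only and are not taken from this module's own `versions.tf`.

```hcl
terraform {
  required_providers {
    # Azure Resource Manager provider, used for the prerequisite data sources and resources
    azurerm = {
      source  = "hashicorp/azurerm"
      version = ">= 3.40.0" # illustrative constraint
    }
    # Databricks provider, passed into the modules through the "databricks.main" alias
    databricks = {
      source  = "databricks/databricks"
      version = ">= 1.14.0" # illustrative constraint
    }
  }
}

provider "azurerm" {
  features {}
}
```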

### The example below covers these features of the module:
1. Workspace admins assignment, custom Workspace group creation, group assignments, group entitlements
2. Workspace IP Access list creation
3. SQL Endpoint creation and configuration
4. Cluster policy creation and permission assignment to custom groups
5. Secret Scope creation and permission assignment to custom groups
6. Connection to an already existing Unity Catalog Metastore

```hcl
# Prerequisite resources

# Databricks Workspace with Premium SKU
data "azurerm_databricks_workspace" "example" {
  name                = "example-workspace"
  resource_group_name = "example-rg"
}

# Databricks Provider configuration
provider "databricks" {
  alias                       = "main"
  host                        = data.azurerm_databricks_workspace.example.workspace_url
  azure_workspace_resource_id = data.azurerm_databricks_workspace.example.id
}

# Key Vault where the Service Principal's secrets are stored. Used for mounting the Storage Container
data "azurerm_key_vault" "example" {
  name                = "example-key-vault"
  resource_group_name = "example-rg"
}

# The "Runtime Premium" module is tightly coupled with this core "Runtime" module; its usage is a prerequisite.
module "databricks_runtime_core" {
  source = "data-platform-hq/databricks-runtime/databricks"

  sku          = data.azurerm_databricks_workspace.example.sku
  workspace_id = data.azurerm_databricks_workspace.example.workspace_id

  # Parameters of the Service Principal used for the ADLS mount.
  # Imports the App ID and Secret of the Service Principal from the target Key Vault
  key_vault_id             = data.azurerm_key_vault.example.id
  sp_client_id_secret_name = "sp-client-id"        # name of the secret that stores the Service Principal App ID
  sp_key_secret_name       = "sp-key"              # name of the secret that stores the Service Principal Secret Key
  tenant_id_secret_name    = "infra-arm-tenant-id" # name of the secret that stores the tenant id value

  # Default cluster parameters
  custom_cluster_policies = [{
    name     = "custom_policy_1",
    assigned = true,         # automatically assigns this policy to the default shared cluster if set to 'true'
    can_use  = "DEVELOPERS", # custom workspace group name that is allowed to use this policy
    definition = {
      "autoscale.max_workers" : {
        "type" : "range",
        "maxValue" : 3,
        "defaultValue" : 2
      },
    }
  }]

  # Additional Secret Scope
  secret_scope = [{
    scope_name = "extra-scope"
    # Only custom workspace group names are allowed. If left empty, only Workspace admins can access these keys
    acl = [
      { principal = "DEVELOPERS", permission = "READ" }
    ]
    secrets = [
      { key = "secret-name", string_value = "secret-value" }
    ]
  }]

  providers = {
    databricks = databricks.main
  }
}

# Example usage of the module for Runtime Premium resources
module "databricks_runtime_premium" {
  source = "data-platform-hq/databricks-runtime-premium/databricks"

  project  = "datahq"
  env      = "example"
  location = "eastus"

  # The Workspace can be accessed only from these IP addresses:
  ip_rules = {
    "ip_range_1" = "10.128.0.0/16",
    "ip_range_2" = "10.33.0.0/16",
  }

  # Map of users and their object IDs.
  # This step is optional; when assigning a Service Principal to the workspace,
  # provide only its App ID as the value
  user_object_ids = {
    "example-service-principal" = "ebfasddf-05sd-4sdc-aasa-ddffgs83c299"
    "user1@example.com"         = "ebfasddf-05sd-4sdc-aasa-ddffgs83c256"
    "user2@example.com"         = "ebfasddf-05sd-4sdc-aasa-ddffgs83c865"
  }

  # To connect to an already existing metastore, provide its ID.
  # An example of new Metastore creation is provided below
  databricks_external_metastore_id = "<uuid-of-metastore>"

  # Workspace admins
  workspace_admins = {
    user = [
      "user1@example.com"
    ]
    service_principal = [
      "example-app-id"
    ]
  }

  # Custom Workspace group with assigned users/service principals.
  # Also allows creating group entitlements and assigning a permission on the default cluster to the custom group.
  iam = {
    DEVELOPERS = {
      user = [
        "user1@example.com",
        "user2@example.com"
      ]
      "service_principal" = []
      entitlements               = ["allow_instance_pool_create", "allow_cluster_create", "databricks_sql_access"]
      default_cluster_permission = "CAN_RESTART" # assigns this permission on the default cluster to the created group
    }
  }

  # Default cluster parameters; this cluster is created by the "Runtime Core" module
  default_cluster_id = { default = module.databricks_runtime_core.cluster_id }

  # Assigns permission on the cluster policy to a custom group ("DEVELOPERS" in this example)
  cluster_policies_object = module.databricks_runtime_core.cluster_policies_object

  # Assigns ACLs on the secret scope to a custom group ("DEVELOPERS" in this example)
  secret_scope_object = module.databricks_runtime_core.secret_scope_object

  providers = {
    databricks = databricks.main
  }
}
```
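
Feature (3) above, SQL Endpoint creation, is not shown in the example. The sketch below illustrates what such an input could look like; the `sql_endpoint` variable name and its attributes are assumptions here, so check the module's `variables.tf` for the exact schema.

```hcl
# Hypothetical input sketch for SQL Endpoint creation; the variable name and its
# attributes are assumptions and must be verified against the module's variables.
module "databricks_runtime_premium" {
  source = "data-platform-hq/databricks-runtime-premium/databricks"

  # ... the arguments from the example above ...

  sql_endpoint = [{
    name             = "example_endpoint"
    cluster_size     = "2X-Small" # smallest SQL warehouse size
    min_num_clusters = 1
    max_num_clusters = 1
  }]
}
```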

### Create Unity Catalog metastore
The example below explains how to create a Unity Catalog Metastore.
It is highly recommended to create the Metastore in a separate environment or even a separate Azure subscription.

```hcl
# Prerequisite resources

# Databricks Workspace with Premium SKU
data "azurerm_databricks_workspace" "example" {
  name                = "example-workspace"
  resource_group_name = "example-rg"
}

# Databricks Provider configuration
provider "databricks" {
  alias                       = "main"
  host                        = data.azurerm_databricks_workspace.example.workspace_url
  azure_workspace_resource_id = data.azurerm_databricks_workspace.example.id
}

# This Access Connector could be created with the Databricks Workspace module
resource "azurerm_databricks_access_connector" "example" {
  name                = "databrickstest"
  resource_group_name = "example-rg"
  location            = "eastus"

  identity {
    type = "SystemAssigned"
  }
}

# Storage Account where the metastore would be created
data "azurerm_storage_account" "example" {
  name                = "metastore"
  resource_group_name = "example-rg"
}
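
# Illustrative addition (not part of this module): Unity Catalog typically requires
# the Access Connector's managed identity to have the "Storage Blob Data Contributor"
# role on the metastore Storage Account; a role assignment like the one below is one
# way to grant it.
resource "azurerm_role_assignment" "metastore_storage" {
  scope                = data.azurerm_storage_account.example.id
  role_definition_name = "Storage Blob Data Contributor"
  principal_id         = azurerm_databricks_access_connector.example.identity[0].principal_id
}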

# Example usage of the module for Unity Catalog Metastore creation
module "databricks_runtime_premium" {
  source = "data-platform-hq/databricks-runtime-premium/databricks"

  project  = "datahq"
  env      = "example"
  location = "eastus"

  ip_rules = {
    "example_devops-0" = "10.128.0.0/16",
    "example_devops-1" = "10.33.0.0/16",
  }
  user_object_ids = {
    "example-app-id"    = "ebfasddf-05sd-4sdc-aasa-ddffgs83c299"
    "user1@example.com" = "ebfasddf-05sd-4sdc-aasa-ddffgs83c256"
  }

  # Unity Catalog
  create_metastore = true
  metastore_grants = { "account users" = ["CREATE_CATALOG"] }

  access_connector_id = azurerm_databricks_access_connector.example.id
  workspace_id        = data.azurerm_databricks_workspace.example.id

  catalog = {
    catalog-one-data = {
      catalog_grants = {
        "user1@example.com" = ["USE_CATALOG", "USE_SCHEMA", "CREATE_SCHEMA", "CREATE_TABLE", "SELECT", "MODIFY"]
        "account users"     = ["USE_CATALOG", "USE_SCHEMA", "SELECT"]
      }
      catalog_comment = "This catalog is created by Terraform"
      schema_name     = ["schema1", "schema2", "schema3"]
      schema_grants = {
        "account users" = ["USE_SCHEMA", "CREATE_TABLE", "CREATE_VIEW", "MODIFY"]
      }
      schema_comment    = "Created by Terraform. Allowed for SELECT operations"
      schema_properties = { allowed = "all users" }
    }

    catalog-two-admin = {
      catalog_grants = {
        "user1@example.com" = ["USE_CATALOG", "USE_SCHEMA", "CREATE_SCHEMA", "CREATE_TABLE", "SELECT", "MODIFY"]
      }
      catalog_comment   = "This catalog is created by Terraform"
      schema_name       = ["schema1"]
      schema_properties = { allowed = "admin only" }
    }
  }

  # Storage Account where the Metastore would be created
  storage_account_id   = data.azurerm_storage_account.example.id
  storage_account_name = data.azurerm_storage_account.example.name

  # Permissions
  workspace_admins = {
    user = [
      "user1@example.com",
    ]
    service_principal = [
      "example-app-id"
    ]
  }

  providers = {
    databricks = databricks.main
  }
}
```
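
Once the Metastore exists, other Premium workspaces can attach to it by passing its ID to `databricks_external_metastore_id`, as in the first example. A minimal sketch is shown below; the `metastore_id` output name is an assumption, so verify it against the module's `outputs.tf`.

```hcl
# Hypothetical sketch: attach another workspace to the Metastore created above.
# NOTE: "metastore_id" is an assumed output name; verify it in the module's outputs.
module "databricks_runtime_premium_second_workspace" {
  source = "data-platform-hq/databricks-runtime-premium/databricks"

  project  = "datahq"
  env      = "example-two"
  location = "eastus"

  # Reuse the already existing Unity Catalog Metastore
  databricks_external_metastore_id = module.databricks_runtime_premium.metastore_id

  providers = {
    databricks = databricks.second # provider aliased to the second workspace
  }
}
```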

<!-- BEGIN_TF_DOCS -->
## Requirements
