Skip to content

Commit b0c11dd

Browse files
MyroslavLevchyk
authored and committed
upd
1 parent 4c02de1 commit b0c11dd

File tree

1 file changed

+64
-39
lines changed

1 file changed

+64
-39
lines changed

README.md

Lines changed: 64 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -9,70 +9,96 @@ The main idea behind this module is to deploy resources for Databricks Workspace
99
Here we provide some examples of how to provision it with a different options.
1010

1111
### In example below, these features of given module would be covered:
12-
1. Permissions and entitlements for users and groups within Databricks
13-
2. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
14-
3. Workspace IP Access list creation
15-
4. ADLS Gen2 Mount
16-
5. Create Secret Scope and assign permissions to custom groups
17-
6. SQL Endpoint creation and configuration
18-
7. Create Cluster policy
19-
8. Create an Azure Key Vault-backed secret scope
12+
1. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
13+
2. Workspace IP Access list creation
14+
3. ADLS Gen2 Mount
15+
4. Create Secret Scope and assign permissions to custom groups
16+
5. SQL Endpoint creation and configuration
17+
6. Create Cluster policy
18+
7. Create an Azure Key Vault-backed secret scope
2019

2120
```hcl
2221
# Prerequisite resources
2322
23+
variable "databricks_account_id" {}
24+
variable "cloud_name" {} # cloud provider (e.g., aws, azure)
25+
2426
# Databricks Workspace with Premium SKU
2527
data "azurerm_databricks_workspace" "example" {
2628
name = "example-workspace"
2729
resource_group_name = "example-rg"
2830
}
2931
32+
# Databricks Workspace ID for Azure
33+
data "azurerm_databricks_workspace" "example" {
34+
name = "example-workspace"
35+
resource_group_name = "example-rg"
36+
}
37+
38+
# Databricks Workspace ID for AWS
39+
data "databricks_mws_workspaces" "example" {
40+
account_id = var.databricks_account_id
41+
}
42+
3043
# Databricks Provider configuration
3144
provider "databricks" {
3245
alias = "main"
3346
host = data.azurerm_databricks_workspace.example.workspace_url
3447
azure_workspace_resource_id = data.azurerm_databricks_workspace.example.id
3548
}
3649
50+
# Databricks Account-Level Provider configuration
51+
provider "databricks" {
52+
alias = "account"
53+
host = "https://accounts.azuredatabricks.net"
54+
account_id = var.databricks_account_id
55+
}
56+
3757
# Key Vault where Service Principal's secrets are stored. Used for mounting Storage Container
3858
data "azurerm_key_vault" "example" {
3959
name = "example-key-vault"
4060
resource_group_name = "example-rg"
4161
}
4262
63+
locals {
64+
databricks_iam_account_groups = [{
65+
group_name = "example-gn"
66+
permissions = ["ADMIN"]
67+
entitlements = [
68+
"allow_instance_pool_create",
69+
"allow_cluster_create",
70+
"databricks_sql_access"
71+
]
72+
}]
73+
}
74+
4375
# Assigns Databricks Account groups to Workspace. It is required to assign Unity Catalog Metastore before assigning Account groups to Workspace
4476
module "databricks_account_groups" {
45-
count = length(var.databricks_iam_account_groups) != 0 ? 1 : 0
77+
count = length(local.databricks_iam_account_groups) != 0 ? 1 : 0
4678
source = "data-platform-hq/databricks-account-groups/databricks"
4779
version = "1.0.1"
48-
49-
workspace_id = module.databricks_workspace.workspace_id
50-
workspace_group_assignment = var.databricks_iam_account_groups
80+
81+
workspace_id = var.cloud_provider == "azure" ? data.azurerm_databricks_workspace.example.id : data.databricks_mws_workspaces.example.workspaces[0].workspace_id
82+
workspace_group_assignment = local.databricks_iam_account_groups
5183
5284
providers = {
5385
databricks = databricks.account
5486
}
5587
}
5688
5789
# Example usage of module for Runtime Premium resources.
58-
module "databricks_runtime_premium" {
59-
source = "data-platform-hq/databricks-runtime-premium/databricks"
90+
module "databricks_runtime_premium" {
91+
source = "data-platform-hq/runtime/databricks"
92+
version = "~>1.0"
6093
6194
project = "datahq"
6295
env = "example"
6396
location = "eastus"
6497
65-
# Parameters of Service principal used for ADLS mount
66-
# Imports App ID and Secret of Service Principal from target Key Vault
67-
key_vault_id = data.azurerm_key_vault.example.id
68-
sp_client_id_secret_name = "sp-client-id" # secret's name that stores Service Principal App ID
69-
sp_key_secret_name = "sp-key" # secret's name that stores Service Principal Secret Key
70-
tenant_id_secret_name = "infra-arm-tenant-id" # secret's name that stores tenant id value
71-
7298
# Cloud provider
73-
cloud_name = cloud-name # cloud provider (e.g., aws, azure)
99+
cloud_name = var.cloud_name
74100
75-
# 1. Permissions and entitlements for users and groups within Databricks
101+
# Example configuration for Workspace Groups
76102
iam_workspace_groups = {
77103
dev = {
78104
user = [
@@ -84,17 +110,10 @@ module "databricks_runtime_premium" {
84110
}
85111
}
86112
87-
iam_account_groups = [{
88-
group_name = "example-gn"
89-
permissions = ["ADMIN"]
90-
entitlements = [
91-
"allow_instance_pool_create",
92-
"allow_cluster_create",
93-
"databricks_sql_access"
94-
]
95-
}]
113+
# Example configuration for Account Groups
114+
iam_account_groups = local.databricks_iam_account_groups
96115
97-
# 2. Databricks clusters configuration, and assign permission to a custom group on clusters.
116+
# 1. Databricks clusters configuration, and assign permission to a custom group on clusters.
98117
databricks_cluster_configs = [ {
99118
cluster_name = "Unity Catalog"
100119
data_security_mode = "USER_ISOLATION"
@@ -110,33 +129,39 @@ module "databricks_runtime_premium" {
110129
permissions = [{group_name = "DEVELOPERS", permission_level = "CAN_MANAGE"}]
111130
}]
112131
113-
# 3. Workspace could be accessed only from these IP Addresses:
132+
# 2. Workspace could be accessed only from these IP Addresses:
114133
ip_rules = {
115134
"ip_range_1" = "10.128.0.0/16",
116135
"ip_range_2" = "10.33.0.0/16",
117136
}
118137
119-
# 4. ADLS Gen2 Mount
138+
# 3. ADLS Gen2 Mount
120139
mountpoints = {
121140
storage_account_name = data.azurerm_storage_account.example.name
122141
container_name = "example_container"
123142
}
124143
125-
# 5. Create Secret Scope and assign permissions to custom groups
144+
# Parameters of Service principal used for ADLS mount
145+
# Imports App ID and Secret of Service Principal from target Key Vault
146+
sp_client_id_secret_name = "sp-client-id" # secret's name that stores Service Principal App ID
147+
sp_key_secret_name = "sp-key" # secret's name that stores Service Principal Secret Key
148+
tenant_id_secret_name = "infra-arm-tenant-id" # secret's name that stores tenant id value
149+
150+
# 4. Create Secret Scope and assign permissions to custom groups
126151
secret_scope = [{
127152
scope_name = "extra-scope"
128153
acl = [{ principal = "DEVELOPERS", permission = "READ" }] # Only custom workspace group names are allowed. If left empty then only Workspace admins could access these keys
129154
secrets = [{ key = "secret-name", string_value = "secret-value"}]
130155
}]
131156
132-
# 6. SQL Warehouse Endpoint
157+
# 5. SQL Warehouse Endpoint
133158
databricks_sql_endpoint = [{
134159
name = "default"
135160
enable_serverless_compute = true
136161
permissions = [{ group_name = "DEVELOPERS", permission_level = "CAN_USE" },]
137162
}]
138163
139-
# 7. Databricks cluster policies
164+
# 6. Databricks cluster policies
140165
custom_cluster_policies = [{
141166
name = "custom_policy_1",
142167
can_use = "DEVELOPERS", # custom workspace group name, that is allowed to use this policy
@@ -149,7 +174,7 @@ module "databricks_runtime_premium" {
149174
}
150175
}]
151176
152-
# 8. Azure Key Vault-backed secret scope
177+
# 7. Azure Key Vault-backed secret scope
153178
key_vault_secret_scope = [{
154179
name = "external"
155180
key_vault_id = data.azurerm_key_vault.example.id

0 commit comments

Comments
 (0)