@@ -17,6 +17,7 @@ Here we provide some examples of how to provision it with different options.
6. Create Cluster policy
7. Create an Azure Key Vault-backed secret scope
+ ### Example for Azure Cloud:
``` hcl
# Prerequisite resources
@@ -176,6 +177,175 @@ module "databricks_runtime_premium" {
```
+ ### The example below covers the following features of this module:
+ 1. Clusters (i.e., for Unity Catalog and Shared Autoscaling)
+ 2. Workspace IP Access list creation
+ 3. Create Secret Scope and assign permissions to custom groups
+ 4. SQL Endpoint creation and configuration
+ 5. Create Cluster policy (a sample policy definition sketch follows the example)
+
+ ### Example for AWS Cloud:
+ ``` hcl
+
+ # Prerequisite resources
+
+ variable "databricks_account_id" {}
+ variable "region" {}
+
+ # Toggles for optional runtime features used further below
+ variable "workspace_admin_token_enabled" {}
+ variable "databricks_system_schemas_enabled" {}
+
+ # Databricks Workspace ID
+ data "databricks_mws_workspaces" "example" {
+   account_id = var.databricks_account_id
+ }
+
+ # Provider configuration for SSM
+ provider "aws" {
+   alias  = "ssm"
+   region = var.region
+ }
+
+ # Databricks Account-Level Provider configuration
+ provider "databricks" {
+   alias         = "mws"
+   host          = "https://accounts.cloud.databricks.com"
+   account_id    = data.aws_ssm_parameter.this["databricks_account_id"].value
+   client_id     = data.aws_ssm_parameter.this["databricks_admin_sp_id"].value
+   client_secret = data.aws_ssm_parameter.this["databricks_admin_sp_secret"].value
+ }
+
+ # Databricks Workspace-Level Provider configuration
+ provider "databricks" {
+   alias         = "workspace"
+   host          = module.databricks_workspace.workspace_url
+   client_id     = data.aws_ssm_parameter.this["databricks_admin_sp_id"].value
+   client_secret = data.aws_ssm_parameter.this["databricks_admin_sp_secret"].value
+ }
+
+ locals {
+   ssm_parameters = [
+     "databricks_account_id",
+     "databricks_admin_sp_id",
+     "databricks_admin_sp_secret",
+     "github_pat_token"
+   ]
+
+   ssm_parameters_prefix = "/example-prefix/" # Prefix for parameters stored in AWS SSM
+
+   dbx_runtime = {
+     iam_account_groups_assignment = [
+       { group_name = "example gm1", permissions = ["USER"] },
+       { group_name = "example gm2", permissions = ["USER"] }
+     ]
+
+     sql_endpoints = [{
+       name = "example_test"
+       permissions = [
+         { group_name = "example gm1", permission_level = "CAN_MANAGE" },
+       ]
+     }]
+
+     clusters = [{
+       cluster_name = "example1"
+       permissions = [
+         { group_name = "example gm2", permission_level = "CAN_RESTART" },
+       ]
+     }, {
+       cluster_name = "example2"
+       permissions = [
+         { group_name = "example gm2", permission_level = "CAN_RESTART" },
+         { group_name = "example gm1", permission_level = "CAN_MANAGE" },
+       ]
+     }]
+
+     secret_scopes = [] # Secret scope definitions and their group permissions can be added here
+   }
+
+   databricks_custom_cluster_policies = [{
+     name       = null
+     can_use    = null
+     definition = null
+   }]
+
+   dbx_inputs = {
+     vpc_id             = "vpc-example"
+     subnet_ids         = ["subnet-example1", "subnet-example2"]
+     security_group_ids = ["sg-example"]
+   }
+
+   iam_default_permission_boundary_policy_arn = "arn:aws:iam::{ AWS Account ID }:policy/eo_role_boundary"
+ }
+
+ # SSM Parameters lookup
+ data "aws_ssm_parameter" "this" {
+   for_each = toset(local.ssm_parameters)
+   name     = "${local.ssm_parameters_prefix}${each.key}"
+   provider = aws.ssm
+ }
+
+ # Label configuration
+ module "label" {
+   source  = "cloudposse/label/null"
+   version = "0.25.0"
+
+   namespace   = "example-namespace"
+   environment = "example-environment"
+   stage       = "example-stage"
+ }
+
+ # Databricks Workspace configuration
+ module "databricks_workspace" {
+   source  = "data-platform-hq/aws-workspace/databricks"
+   version = "1.0.1"
+
+   label              = module.label.id
+   vpc_id             = local.dbx_inputs.vpc_id
+   subnet_ids         = local.dbx_inputs.subnet_ids
+   security_group_ids = local.dbx_inputs.security_group_ids
+   region             = var.region
+   account_id         = data.aws_ssm_parameter.this["databricks_account_id"].value
+   iam_cross_account_workspace_role_config = {
+     permission_boundary_arn = local.iam_default_permission_boundary_policy_arn
+   }
+
+   providers = {
+     databricks = databricks.mws
+   }
+ }
+
+ # Account-level group assignment to the Workspace
+ module "databricks_account_groups" {
+   source  = "data-platform-hq/databricks-account-groups/databricks"
+   version = "1.0.1"
+
+   workspace_id               = module.databricks_workspace.workspace_id
+   workspace_group_assignment = local.dbx_runtime.iam_account_groups_assignment
+
+   providers = {
+     databricks = databricks.mws
+   }
+ }
+
+ # Databricks Runtime resources configuration (clusters, sql, secrets, etc.)
+ module "databricks_runtime" {
+   source  = "data-platform-hq/runtime/databricks"
+   version = "1.0.0"
+
+   clusters                      = local.dbx_runtime.clusters
+   sql_endpoint                  = local.dbx_runtime.sql_endpoints
+   secret_scope                  = local.dbx_runtime.secret_scopes
+   workspace_admin_token_enabled = var.workspace_admin_token_enabled
+   system_schemas_enabled        = alltrue([var.databricks_system_schemas_enabled])
+
+   iam_account_groups      = local.dbx_runtime.iam_account_groups_assignment
+   cloud_name              = "aws"
+   custom_cluster_policies = local.databricks_custom_cluster_policies
+
+   providers = {
+     databricks = databricks.workspace
+   }
+
+   depends_on = [module.databricks_workspace, module.databricks_account_groups]
+ }
+
+ ```
+
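+ The example above leaves `databricks_custom_cluster_policies` populated with `null` values. As a rough, non-authoritative sketch of how it could be filled in: the attribute names inside `definition` follow the Databricks cluster policy definition format, while the policy name, group name, and values below are illustrative assumptions rather than part of the module's documented example.
+
+ ``` hcl
+ # Hypothetical cluster policy; names and values are illustrative only
+ databricks_custom_cluster_policies = [{
+   name    = "example-policy"  # assumed policy name
+   can_use = ["example gm1"]   # groups allowed to use the policy
+   definition = {
+     "autotermination_minutes" = { "type" = "fixed", "value" = 30, "hidden" = true }
+     "dbus_per_hour"           = { "type" = "range", "maxValue" = 10 }
+   }
+ }]
+ ```
+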
<!-- BEGIN_TF_DOCS -->
## Requirements