From e08e9f13e999693686e89b4ec300f3af5ad32322 Mon Sep 17 00:00:00 2001
From: Renaud Hartert
Date: Wed, 26 Feb 2025 18:12:54 +0100
Subject: [PATCH 1/3] Use black

---
 CONTRIBUTING.md | 3 +-
 Makefile | 4 +-
 databricks/sdk/__init__.py | 318 +-
 databricks/sdk/_base_client.py | 190 +-
 databricks/sdk/_property.py | 19 +-
 databricks/sdk/_widgets/__init__.py | 15 +-
 .../sdk/_widgets/default_widgets_utils.py | 36 +-
 databricks/sdk/_widgets/ipywidgets_utils.py | 69 +-
 databricks/sdk/azure.py | 14 +-
 databricks/sdk/casing.py | 10 +-
 databricks/sdk/config.py | 207 +-
 databricks/sdk/core.py | 104 +-
 databricks/sdk/credentials_provider.py | 501 +-
 databricks/sdk/data_plane.py | 11 +-
 databricks/sdk/dbutils.py | 210 +-
 databricks/sdk/environments.py | 85 +-
 databricks/sdk/errors/base.py | 51 +-
 databricks/sdk/errors/customizer.py | 6 +-
 databricks/sdk/errors/deserializer.py | 63 +-
 databricks/sdk/errors/mapper.py | 2 +-
 databricks/sdk/errors/overrides.py | 51 +-
 databricks/sdk/errors/parser.py | 38 +-
 databricks/sdk/errors/platform.py | 20 +-
 databricks/sdk/errors/private_link.py | 48 +-
 databricks/sdk/logger/round_trip_logger.py | 44 +-
 databricks/sdk/mixins/compute.py | 150 +-
 databricks/sdk/mixins/files.py | 351 +-
 databricks/sdk/mixins/jobs.py | 120 +-
 databricks/sdk/mixins/open_ai_client.py | 46 +-
 databricks/sdk/mixins/workspace.py | 75 +-
 databricks/sdk/oauth.py | 443 +-
 databricks/sdk/retries.py | 26 +-
 databricks/sdk/runtime/__init__.py | 49 +-
 databricks/sdk/runtime/dbutils_stub.py | 91 +-
 databricks/sdk/service/_internal.py | 19 +-
 databricks/sdk/service/apps.py | 1082 +-
 databricks/sdk/service/billing.py | 1458 +-
 databricks/sdk/service/catalog.py | 11209 ++++++++++------
 databricks/sdk/service/cleanrooms.py | 940 +-
 databricks/sdk/service/compute.py | 8456 +++++++-----
 databricks/sdk/service/dashboards.py | 1973 ++-
 databricks/sdk/service/files.py | 745 +-
 databricks/sdk/service/iam.py | 3742 ++++--
 databricks/sdk/service/jobs.py | 6462 +++++----
 databricks/sdk/service/marketplace.py | 3865 ++++--
 databricks/sdk/service/ml.py | 5459 +++++---
 databricks/sdk/service/oauth2.py | 1436 +-
 databricks/sdk/service/pipelines.py | 3036 +++--
 databricks/sdk/service/provisioning.py | 2492 ++--
 databricks/sdk/service/serving.py | 3219 +++--
 databricks/sdk/service/settings.py | 4902 ++++---
 databricks/sdk/service/sharing.py | 2080 +--
 databricks/sdk/service/sql.py | 7776 ++++++----
 databricks/sdk/service/vectorsearch.py | 1388 +-
 databricks/sdk/service/workspace.py | 2163 +--
 databricks/sdk/useragent.py | 58 +-
 databricks/sdk/version.py | 2 +-
 examples/account/budgets/create_budgets.py | 50 +-
 examples/account/budgets/get_budgets.py | 50 +-
 examples/account/budgets/update_budgets.py | 76 +-
 .../account/credentials/create_credentials.py | 8 +-
 .../credentials/create_log_delivery.py | 8 +-
 .../account/credentials/create_workspaces.py | 8 +-
 .../account/credentials/get_credentials.py | 8 +-
 .../encryption_keys/create_encryption_keys.py | 10 +-
 .../encryption_keys/get_encryption_keys.py | 10 +-
 .../log_delivery/create_log_delivery.py | 35 +-
 .../account/log_delivery/get_log_delivery.py | 35 +-
 examples/account/networks/create_networks.py | 11 +-
 examples/account/networks/get_networks.py | 11 +-
 .../private_access/create_private_access.py | 6 +-
 .../private_access/get_private_access.py | 6 +-
 .../private_access/replace_private_access.py | 14 +-
 .../create_account_service_principal.py | 2 +-
 .../create_workspace_assignment.py | 2 +-
 .../create_workspace_assignment_on_aws.py | 2 +-
 .../get_account_service_principal.py | 2 +-
 .../list_account_service_principal.py | 2 +-
 .../patch_account_service_principal.py | 10 +-
 .../update_account_service_principal.py | 2 +-
 .../account/storage/create_log_delivery.py | 6 +-
 examples/account/storage/create_storage.py | 6 +-
 examples/account/storage/create_workspaces.py | 5 +-
 examples/account/storage/get_storage.py | 6 +-
 .../account/users/create_account_users.py | 5 +-
 .../account/users/delete_account_users.py | 5 +-
 examples/account/users/get_account_users.py | 5 +-
 examples/account/users/patch_account_users.py | 21 +-
 .../vpc_endpoints/create_vpc_endpoints.py | 8 +-
 .../vpc_endpoints/get_vpc_endpoints.py | 8 +-
 .../update_workspace_assignment.py | 10 +-
 .../update_workspace_assignment_on_aws.py | 10 +-
 .../account/workspaces/create_workspaces.py | 23 +-
 .../account/workspaces/update_workspaces.py | 13 +-
 examples/external_browser_auth.py | 32 +-
 examples/flask_app_with_oauth.py | 63 +-
 examples/last_job_runs.py | 23 +-
 examples/list_compute_submitrun_runs.py | 26 +-
 examples/starting_job_and_waiting.py | 40 +-
 examples/workspace/alerts/create_alerts.py | 30 +-
 examples/workspace/alerts/get_alerts.py | 30 +-
 examples/workspace/alerts/update_alerts.py | 38 +-
 .../create_catalog_workspace_bindings.py | 2 +-
 .../workspace/catalogs/create_catalogs.py | 2 +-
 examples/workspace/catalogs/create_schemas.py | 2 +-
 examples/workspace/catalogs/create_shares.py | 2 +-
 examples/workspace/catalogs/create_tables.py | 2 +-
 examples/workspace/catalogs/create_volumes.py | 2 +-
 examples/workspace/catalogs/get_catalogs.py | 2 +-
 .../update_catalog_workspace_bindings.py | 2 +-
 .../workspace/catalogs/update_catalogs.py | 2 +-
 .../create_cluster_policies.py | 8 +-
 .../cluster_policies/edit_cluster_policies.py | 18 +-
 .../cluster_policies/get_cluster_policies.py | 8 +-
 .../change_owner_clusters_api_integration.py | 16 +-
 .../create_clusters_api_integration.py | 14 +-
 .../delete_clusters_api_integration.py | 14 +-
 .../clusters/edit_clusters_api_integration.py | 28 +-
 .../events_clusters_api_integration.py | 14 +-
 .../clusters/get_clusters_api_integration.py | 14 +-
 .../clusters/pin_clusters_api_integration.py | 14 +-
 .../resize_clusters_api_integration.py | 14 +-
 .../restart_clusters_api_integration.py | 14 +-
 .../start_clusters_api_integration.py | 14 +-
 .../unpin_clusters_api_integration.py | 14 +-
 .../execute_commands_direct_usage.py | 10 +-
 .../command_executor/execute_commands.py | 5 +-
 .../connections/create_connections.py | 21 +-
 .../workspace/connections/get_connections.py | 38 +-
 .../connections/update_connections.py | 38 +-
 .../workspace/dashboards/create_dashboards.py | 2 +-
 .../workspace/dashboards/delete_dashboards.py | 2 +-
 .../workspace/dashboards/get_dashboards.py | 2 +-
 .../dashboards/restore_dashboards.py | 2 +-
 examples/workspace/databricks/must_tokens.py | 5 +-
 examples/workspace/dbfs/download_file.py | 8 +-
 .../create_experiment_experiments.py | 2 +-
 .../create_experiment_m_lflow_runs.py | 2 +-
 .../experiments/create_run_m_lflow_runs.py | 8 +-
 .../experiments/get_experiment_experiments.py | 2 +-
 .../update_experiment_experiments.py | 4 +-
 .../experiments/update_run_m_lflow_runs.py | 8 +-
 .../create_external_locations.py | 13 +-
 .../create_external_locations_on_aws.py | 13 +-
 .../external_locations/create_volumes.py | 16 +-
 .../get_external_locations.py | 13 +-
 .../get_external_locations_on_aws.py | 13 +-
 .../update_external_locations.py | 21 +-
 .../update_external_locations_on_aws.py | 21 +-
 .../git_credentials/update_git_credentials.py | 10 +-
 .../create_global_init_scripts.py | 10 +-
 .../get_global_init_scripts.py | 10 +-
 .../update_global_init_scripts.py | 18 +-
 .../workspace/grants/get_effective_tables.py | 27 +-
 examples/workspace/grants/update_tables.py | 38 +-
 .../groups/create_generic_permissions.py | 2 +-
 examples/workspace/groups/create_groups.py | 2 +-
 examples/workspace/groups/create_secrets.py | 2 +-
 .../groups/delete_generic_permissions.py | 2 +-
 examples/workspace/groups/delete_groups.py | 2 +-
 examples/workspace/groups/delete_secrets.py | 2 +-
 examples/workspace/groups/get_groups.py | 2 +-
 examples/workspace/groups/patch_groups.py | 25 +-
 .../instance_pools/create_instance_pools.py | 2 +-
 .../instance_pools/edit_instance_pools.py | 10 +-
 .../instance_pools/get_instance_pools.py | 2 +-
 .../add_aws_instance_profiles.py | 8 +-
 .../edit_aws_instance_profiles.py | 5 +-
 .../ip_access_lists/create_ip_access_lists.py | 8 +-
 .../ip_access_lists/get_ip_access_lists.py | 8 +-
 .../replace_ip_access_lists.py | 20 +-
 ...ncel_all_runs_jobs_api_full_integration.py | 27 +-
 .../cancel_run_jobs_api_full_integration.py | 31 +-
 .../jobs/create_jobs_api_full_integration.py | 27 +-
 .../export_run_jobs_api_full_integration.py | 31 +-
 .../jobs/get_jobs_api_full_integration.py | 27 +-
 ...et_run_output_jobs_api_full_integration.py | 23 +-
 .../list_runs_jobs_api_full_integration.py | 27 +-
 .../repair_run_jobs_api_full_integration.py | 37 +-
 .../jobs/reset_jobs_api_full_integration.py | 40 +-
 .../jobs/run_now_jobs_api_full_integration.py | 27 +-
 .../jobs/submit_jobs_api_full_integration.py | 23 +-
 .../jobs/update_jobs_api_full_integration.py | 42 +-
 .../workspace/libraries/update_libraries.py | 13 +-
 .../workspace/metastores/assign_metastores.py | 7 +-
 .../workspace/metastores/create_metastores.py | 7 +-
 .../enable_optimization_metastores.py | 7 +-
 .../workspace/metastores/get_metastores.py | 7 +-
 .../metastores/maintenance_metastores.py | 7 +-
 .../metastores/unassign_metastores.py | 7 +-
 .../workspace/metastores/update_metastores.py | 9 +-
 .../create_comment_model_version_comments.py | 10 +-
 .../create_model_model_version_comments.py | 2 +-
 .../create_model_model_versions.py | 2 +-
 .../model_registry/create_model_models.py | 2 +-
 ...te_model_version_model_version_comments.py | 2 +-
 .../create_model_version_model_versions.py | 2 +-
 .../create_webhook_registry_webhooks.py | 8 +-
 .../model_registry/get_model_models.py | 2 +-
 .../update_comment_model_version_comments.py | 12 +-
 .../model_registry/update_model_models.py | 8 +-
 .../update_model_version_model_versions.py | 10 +-
 .../update_webhook_registry_webhooks.py | 10 +-
 .../permissions/get_generic_permissions.py | 2 +-
 ...t_permission_levels_generic_permissions.py | 5 +-
 .../permissions/set_generic_permissions.py | 20 +-
 .../workspace/pipelines/create_pipelines.py | 21 +-
 examples/workspace/pipelines/get_pipelines.py | 21 +-
 .../list_pipeline_events_pipelines.py | 21 +-
 .../workspace/pipelines/update_pipelines.py | 40 +-
 .../workspace/providers/create_providers.py | 2 +-
 examples/workspace/providers/get_providers.py | 2 +-
 .../providers/list_shares_providers.py | 2 +-
 .../workspace/providers/update_providers.py | 2 +-
 examples/workspace/queries/create_alerts.py | 12 +-
 examples/workspace/queries/create_queries.py | 12 +-
 examples/workspace/queries/get_queries.py | 12 +-
 examples/workspace/queries/update_queries.py | 26 +-
 .../query_history/list_sql_query_history.py | 7 +-
 .../workspace/recipients/create_recipients.py | 2 +-
 .../workspace/recipients/get_recipients.py | 2 +-
 .../recipients/rotate_token_recipients.py | 2 +-
 .../share_permissions_recipients.py | 2 +-
 .../workspace/recipients/update_recipients.py | 4 +-
 examples/workspace/repos/create_repos.py | 8 +-
 examples/workspace/repos/get_repos.py | 8 +-
 examples/workspace/repos/update_repos.py | 8 +-
 examples/workspace/schemas/create_schemas.py | 4 +-
 examples/workspace/schemas/create_shares.py | 4 +-
 examples/workspace/schemas/create_tables.py | 4 +-
 examples/workspace/schemas/create_volumes.py | 4 +-
 examples/workspace/schemas/get_schemas.py | 4 +-
 examples/workspace/schemas/list_schemas.py | 2 +-
 examples/workspace/schemas/update_schemas.py | 6 +-
 .../workspace/secrets/create_scope_secrets.py | 4 +-
 .../workspace/secrets/list_acls_secrets.py | 4 +-
 .../workspace/secrets/list_secrets_secrets.py | 4 +-
 examples/workspace/secrets/put_acl_secrets.py | 12 +-
 .../workspace/secrets/put_secret_secrets.py | 6 +-
 .../create_create_obo_token_on_aws.py | 6 +-
 .../create_service_principals_on_aws.py | 2 +-
 .../get_service_principals_on_aws.py | 2 +-
 .../patch_service_principals_on_aws.py | 10 +-
 .../update_service_principals_on_aws.py | 10 +-
 examples/workspace/shares/create_shares.py | 2 +-
 examples/workspace/shares/get_shares.py | 2 +-
 examples/workspace/shares/update_shares.py | 39 +-
 .../statement_execution/execute_shares.py | 10 +-
 .../statement_execution/execute_tables.py | 16 +-
 .../create_external_locations.py | 5 +-
 .../create_external_locations_on_aws.py | 5 +-
 .../create_storage_credentials.py | 5 +-
 .../create_storage_credentials_on_aws.py | 5 +-
 .../storage_credentials/create_volumes.py | 5 +-
 .../get_storage_credentials.py | 5 +-
 .../get_storage_credentials_on_aws.py | 5 +-
 .../update_storage_credentials.py | 10 +-
 .../update_storage_credentials_on_aws.py | 10 +-
 examples/workspace/tables/get_tables.py | 22 +-
 .../workspace/tables/list_summaries_tables.py | 7 +-
 examples/workspace/tables/list_tables.py | 4 +-
 ...reate_obo_token_create_obo_token_on_aws.py | 6 +-
 .../get_create_obo_token_on_aws.py | 6 +-
 examples/workspace/tokens/create_tokens.py | 2 +-
 examples/workspace/tokens/get_tokens.py | 2 +-
 .../users/create_clusters_api_integration.py | 2 +-
 examples/workspace/users/create_users.py | 5 +-
 .../workspace/users/create_workspace_users.py | 5 +-
 .../users/delete_clusters_api_integration.py | 2 +-
 examples/workspace/users/delete_users.py | 5 +-
 .../workspace/users/delete_workspace_users.py | 5 +-
 examples/workspace/users/get_users.py | 5 +-
 .../workspace/users/get_workspace_users.py | 5 +-
 examples/workspace/users/list_users.py | 8 +-
 .../workspace/users/list_workspace_users.py | 8 +-
 .../workspace/users/patch_workspace_users.py | 13 +-
 .../workspace/users/update_workspace_users.py | 5 +-
 examples/workspace/volumes/create_volumes.py | 40 +-
 examples/workspace/volumes/list_volumes.py | 4 +-
 examples/workspace/volumes/read_volumes.py | 40 +-
 examples/workspace/volumes/update_volumes.py | 40 +-
 .../warehouses/create_sql_warehouses.py | 7 +-
 .../warehouses/edit_sql_warehouses.py | 19 +-
 .../warehouses/get_sql_warehouses.py | 7 +-
 examples/workspace/workspace/download_file.py | 8 +-
 .../export__workspace_integration.py | 2 +-
 .../workspace/export_workspace_integration.py | 2 +-
 .../get_status_generic_permissions.py | 2 +-
 .../get_status_workspace_integration.py | 2 +-
 .../workspace/import__generic_permissions.py | 20 +-
 .../import__jobs_api_full_integration.py | 20 +-
 .../workspace/workspace/import__pipelines.py | 14 +-
 .../import__workspace_integration.py | 15 +-
 .../workspace/import_generic_permissions.py | 20 +-
 .../import_jobs_api_full_integration.py | 20 +-
 .../workspace/workspace/import_pipelines.py | 14 +-
 .../workspace/import_workspace_integration.py | 15 +-
 .../workspace/workspace/list_recursive.py | 4 +-
 .../workspace/list_workspace_integration.py | 2 +-
 .../workspace/workspace/upload_notebook.py | 8 +-
 .../get_catalog_workspace_bindings.py | 2 +-
 .../update_catalog_workspace_bindings.py | 2 +-
 pyproject.toml | 14 +-
 setup.cfg | 5 +-
 tests/conftest.py | 37 +-
 tests/fixture_server.py | 18 +-
 tests/integration/conftest.py | 83 +-
 tests/integration/test_auth.py | 130 +-
 tests/integration/test_client.py | 4 +-
 tests/integration/test_clusters.py | 20 +-
 tests/integration/test_commands.py | 4 +-
 tests/integration/test_dbconnect.py | 18 +-
 tests/integration/test_dbutils.py | 46 +-
 tests/integration/test_deployment.py | 4 +-
 tests/integration/test_external_browser.py | 38 +-
 tests/integration/test_files.py | 263 +-
 tests/integration/test_iam.py | 40 +-
 tests/integration/test_jobs.py | 65 +-
 tests/integration/test_repos.py | 2 +-
 tests/integration/test_sql.py | 10 +-
 tests/integration/test_workspace.py | 55 +-
 tests/test_auth.py | 250 +-
 tests/test_auth_manual_tests.py | 104 +-
 tests/test_base_client.py | 361 +-
 tests/test_compute_mixins.py | 21 +-
 tests/test_config.py | 122 +-
 tests/test_core.py | 264 +-
 tests/test_credentials_provider.py | 90 +-
 tests/test_data_plane.py | 36 +-
 tests/test_dbfs_mixins.py | 92 +-
 tests/test_dbutils.py | 252 +-
 tests/test_environments.py | 6 +-
 tests/test_errors.py | 286 +-
 tests/test_files.py | 350 +-
 tests/test_init_file.py | 6 +-
 tests/test_internal.py | 31 +-
 tests/test_jobs.py | 84 +-
 tests/test_jobs_mixin.py | 1095 +-
 tests/test_metadata_service_auth.py | 48 +-
 tests/test_misc.py | 53 +-
 tests/test_model_serving_auth.py | 167 +-
 tests/test_oauth.py | 112 +-
 tests/test_open_ai_mixin.py | 36 +-
 tests/test_refreshable.py | 122 +-
 tests/test_retries.py | 20 +-
 tests/test_user_agent.py | 37 +-
 tests/testdata/test_casing.py | 16 +-
 347 files changed, 52498 insertions(+), 32735 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 249d7498e..c398f43f8 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -13,7 +13,8 @@ If it is appropriate to write a design document, the document must be hosted eit
 Small patches and bug fixes don't need prior communication.
 
 ## Coding Style
-Code style is enforced by a formatter check in your pull request. We use [yapf](https://github.com/google/yapf) to format our code. Run `make fmt` to ensure your code is properly formatted prior to raising a pull request.
+
+Code style is enforced by a formatter check in your pull request. We use [Black](https://github.com/psf/black) to format our code. Run `make fmt` to ensure your code is properly formatted prior to raising a pull request.
 
 ## Signed Commits
 This repo requires all contributors to sign their commits. To configure this, you can follow [Github's documentation](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) to create a GPG key, upload it to your Github account, and configure your git client to sign commits.
diff --git a/Makefile b/Makefile
index 8c0be4b8a..c147f4074 100644
--- a/Makefile
+++ b/Makefile
@@ -11,12 +11,12 @@ install:
 	pip install .
fmt: - yapf -pri databricks tests + black databricks tests autoflake -ri databricks tests isort databricks tests fmte: - yapf -pri examples + black examples autoflake -ri examples isort examples diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 79b1c3353..81ffbdf24 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -122,6 +122,7 @@ def _make_dbutils(config: client.Config): # We are in runtime, so we can use the runtime dbutils from databricks.sdk.runtime import dbutils as runtime_dbutils + return runtime_dbutils @@ -138,58 +139,62 @@ class WorkspaceClient: The WorkspaceClient is a client for the workspace-level Databricks REST API. """ - def __init__(self, - *, - host: Optional[str] = None, - account_id: Optional[str] = None, - username: Optional[str] = None, - password: Optional[str] = None, - client_id: Optional[str] = None, - client_secret: Optional[str] = None, - token: Optional[str] = None, - profile: Optional[str] = None, - config_file: Optional[str] = None, - azure_workspace_resource_id: Optional[str] = None, - azure_client_secret: Optional[str] = None, - azure_client_id: Optional[str] = None, - azure_tenant_id: Optional[str] = None, - azure_environment: Optional[str] = None, - auth_type: Optional[str] = None, - cluster_id: Optional[str] = None, - google_credentials: Optional[str] = None, - google_service_account: Optional[str] = None, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - config: Optional[client.Config] = None): + def __init__( + self, + *, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, + product="unknown", + product_version="0.0.0", + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None, + ): if not config: - config = client.Config(host=host, - account_id=account_id, - username=username, - password=password, - client_id=client_id, - client_secret=client_secret, - token=token, - profile=profile, - config_file=config_file, - azure_workspace_resource_id=azure_workspace_resource_id, - azure_client_secret=azure_client_secret, - azure_client_id=azure_client_id, - azure_tenant_id=azure_tenant_id, - azure_environment=azure_environment, - auth_type=auth_type, - cluster_id=cluster_id, - google_credentials=google_credentials, - google_service_account=google_service_account, - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version) + 
config = client.Config( + host=host, + account_id=account_id, + username=username, + password=password, + client_id=client_id, + client_secret=client_secret, + token=token, + profile=profile, + config_file=config_file, + azure_workspace_resource_id=azure_workspace_resource_id, + azure_client_secret=azure_client_secret, + azure_client_id=azure_client_id, + azure_tenant_id=azure_tenant_id, + azure_environment=azure_environment, + auth_type=auth_type, + cluster_id=cluster_id, + google_credentials=google_credentials, + google_service_account=google_service_account, + credentials_strategy=credentials_strategy, + credentials_provider=credentials_provider, + debug_truncate_bytes=debug_truncate_bytes, + debug_headers=debug_headers, + product=product, + product_version=product_version, + ) self._config = config.copy() self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) @@ -212,7 +217,8 @@ def __init__(self, self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client) self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client) self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI( - self._api_client) + self._api_client + ) self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client) self._credentials = service.catalog.CredentialsAPI(self._api_client) self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client) @@ -246,8 +252,7 @@ def __init__(self, self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client) self._permissions = service.iam.PermissionsAPI(self._api_client) self._pipelines = service.pipelines.PipelinesAPI(self._api_client) - self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI( - self._api_client) + self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI(self._api_client) self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client) self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client) self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client) @@ -255,9 +260,11 @@ def __init__(self, self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client) self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client) self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI( - self._api_client) + self._api_client + ) self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI( - self._api_client) + self._api_client + ) self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client) self._providers = service.sharing.ProvidersAPI(self._api_client) self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client) @@ -278,7 +285,8 @@ def __init__(self, self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client) self._serving_endpoints = serving_endpoints self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI( - self._api_client, serving_endpoints) + self._api_client, serving_endpoints + ) self._settings = service.settings.SettingsAPI(self._api_client) self._shares = service.sharing.SharesAPI(self._api_client) self._statement_execution = service.sql.StatementExecutionAPI(self._api_client) @@ -316,7 +324,9 @@ def access_control(self) -> 
service.iam.AccessControlAPI: return self._access_control @property - def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI: + def account_access_control_proxy( + self, + ) -> service.iam.AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account.""" return self._account_access_control_proxy @@ -381,12 +391,16 @@ def connections(self) -> service.catalog.ConnectionsAPI: return self._connections @property - def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI: + def consumer_fulfillments( + self, + ) -> service.marketplace.ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" return self._consumer_fulfillments @property - def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI: + def consumer_installations( + self, + ) -> service.marketplace.ConsumerInstallationsAPI: """Installations are entities that allow consumers to interact with Databricks Marketplace listings.""" return self._consumer_installations @@ -396,7 +410,9 @@ def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI: return self._consumer_listings @property - def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI: + def consumer_personalization_requests( + self, + ) -> service.marketplace.ConsumerPersonalizationRequestsAPI: """Personalization Requests allow customers to interact with the individualized Marketplace listing flow.""" return self._consumer_personalization_requests @@ -541,7 +557,9 @@ def model_versions(self) -> service.catalog.ModelVersionsAPI: return self._model_versions @property - def notification_destinations(self) -> service.settings.NotificationDestinationsAPI: + def notification_destinations( + self, + ) -> service.settings.NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" return self._notification_destinations @@ -566,12 +584,16 @@ def pipelines(self) -> service.pipelines.PipelinesAPI: return self._pipelines @property - def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI: + def policy_compliance_for_clusters( + self, + ) -> service.compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" return self._policy_compliance_for_clusters @property - def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI: + def policy_compliance_for_jobs( + self, + ) -> service.jobs.PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" return self._policy_compliance_for_jobs @@ -581,7 +603,9 @@ def policy_families(self) -> service.compute.PolicyFamiliesAPI: return self._policy_families @property - def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI: + def provider_exchange_filters( + self, + ) -> service.marketplace.ProviderExchangeFiltersAPI: """Marketplace exchanges filters curate which groups can access an exchange.""" return self._provider_exchange_filters @@ -601,13 +625,16 @@ def provider_listings(self) -> service.marketplace.ProviderListingsAPI: return self._provider_listings @property - def provider_personalization_requests(self) -> service.marketplace.ProviderPersonalizationRequestsAPI: + def provider_personalization_requests( + 
self, + ) -> service.marketplace.ProviderPersonalizationRequestsAPI: """Personalization requests are an alternate to instantly available listings.""" return self._provider_personalization_requests @property def provider_provider_analytics_dashboards( - self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI: + self, + ) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" return self._provider_provider_analytics_dashboards @@ -652,7 +679,9 @@ def query_visualizations(self) -> service.sql.QueryVisualizationsAPI: return self._query_visualizations @property - def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI: + def query_visualizations_legacy( + self, + ) -> service.sql.QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" return self._query_visualizations_legacy @@ -707,7 +736,9 @@ def serving_endpoints(self) -> ServingEndpointsExt: return self._serving_endpoints @property - def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI: + def serving_endpoints_data_plane( + self, + ) -> service.serving.ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" return self._serving_endpoints_data_plane @@ -747,7 +778,9 @@ def tables(self) -> service.catalog.TablesAPI: return self._tables @property - def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI: + def temporary_table_credentials( + self, + ) -> service.catalog.TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.""" return self._temporary_table_credentials @@ -767,12 +800,16 @@ def users(self) -> service.iam.UsersAPI: return self._users @property - def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI: + def vector_search_endpoints( + self, + ) -> service.vectorsearch.VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" return self._vector_search_endpoints @property - def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI: + def vector_search_indexes( + self, + ) -> service.vectorsearch.VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries.""" return self._vector_search_indexes @@ -803,9 +840,11 @@ def workspace_conf(self) -> service.settings.WorkspaceConfAPI: def get_workspace_id(self) -> int: """Get the workspace ID of the workspace that this client is connected to.""" - response = self._api_client.do("GET", - "/api/2.0/preview/scim/v2/Me", - response_headers=['X-Databricks-Org-Id']) + response = self._api_client.do( + "GET", + "/api/2.0/preview/scim/v2/Me", + response_headers=["X-Databricks-Org-Id"], + ) return int(response["X-Databricks-Org-Id"]) def __repr__(self): @@ -817,58 +856,62 @@ class AccountClient: The AccountClient is a client for the account-level Databricks REST API. 
""" - def __init__(self, - *, - host: Optional[str] = None, - account_id: Optional[str] = None, - username: Optional[str] = None, - password: Optional[str] = None, - client_id: Optional[str] = None, - client_secret: Optional[str] = None, - token: Optional[str] = None, - profile: Optional[str] = None, - config_file: Optional[str] = None, - azure_workspace_resource_id: Optional[str] = None, - azure_client_secret: Optional[str] = None, - azure_client_id: Optional[str] = None, - azure_tenant_id: Optional[str] = None, - azure_environment: Optional[str] = None, - auth_type: Optional[str] = None, - cluster_id: Optional[str] = None, - google_credentials: Optional[str] = None, - google_service_account: Optional[str] = None, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - config: Optional[client.Config] = None): + def __init__( + self, + *, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, + product="unknown", + product_version="0.0.0", + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None, + ): if not config: - config = client.Config(host=host, - account_id=account_id, - username=username, - password=password, - client_id=client_id, - client_secret=client_secret, - token=token, - profile=profile, - config_file=config_file, - azure_workspace_resource_id=azure_workspace_resource_id, - azure_client_secret=azure_client_secret, - azure_client_id=azure_client_id, - azure_tenant_id=azure_tenant_id, - azure_environment=azure_environment, - auth_type=auth_type, - cluster_id=cluster_id, - google_credentials=google_credentials, - google_service_account=google_service_account, - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version) + config = client.Config( + host=host, + account_id=account_id, + username=username, + password=password, + client_id=client_id, + client_secret=client_secret, + token=token, + profile=profile, + config_file=config_file, + azure_workspace_resource_id=azure_workspace_resource_id, + azure_client_secret=azure_client_secret, + azure_client_id=azure_client_id, + azure_tenant_id=azure_tenant_id, + azure_environment=azure_environment, + auth_type=auth_type, + cluster_id=cluster_id, + google_credentials=google_credentials, + google_service_account=google_service_account, + credentials_strategy=credentials_strategy, + credentials_provider=credentials_provider, + 
debug_truncate_bytes=debug_truncate_bytes, + debug_headers=debug_headers, + product=product, + product_version=product_version, + ) self._config = config.copy() self._api_client = client.ApiClient(self._config) self._access_control = service.iam.AccountAccessControlAPI(self._api_client) @@ -888,8 +931,7 @@ def __init__(self, self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client) self._private_access = service.provisioning.PrivateAccessAPI(self._api_client) self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client) - self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI( - self._api_client) + self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI(self._api_client) self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client) self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client) self._settings = service.settings.AccountSettingsAPI(self._api_client) @@ -961,7 +1003,9 @@ def log_delivery(self) -> service.billing.LogDeliveryAPI: return self._log_delivery @property - def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI: + def metastore_assignments( + self, + ) -> service.catalog.AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" return self._metastore_assignments @@ -991,17 +1035,23 @@ def private_access(self) -> service.provisioning.PrivateAccessAPI: return self._private_access @property - def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI: + def published_app_integration( + self, + ) -> service.oauth2.PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration @property - def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI: + def service_principal_federation_policy( + self, + ) -> service.oauth2.ServicePrincipalFederationPolicyAPI: """These APIs manage service principal federation policies.""" return self._service_principal_federation_policy @property - def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI: + def service_principal_secrets( + self, + ) -> service.oauth2.ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets.""" return self._service_principal_secrets @@ -1021,7 +1071,9 @@ def storage(self) -> service.provisioning.StorageAPI: return self._storage @property - def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI: + def storage_credentials( + self, + ) -> service.catalog.AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py index f0950f656..90570de97 100644 --- a/databricks/sdk/_base_client.py +++ b/databricks/sdk/_base_client.py @@ -17,7 +17,7 @@ from .logger import RoundTrip from .retries import retried -logger = logging.getLogger('databricks.sdk') +logger = logging.getLogger("databricks.sdk") def _fix_host_if_needed(host: Optional[str]) -> Optional[str]: @@ -25,35 +25,37 @@ def _fix_host_if_needed(host: Optional[str]) -> Optional[str]: return host # Add a default scheme if 
it's missing - if '://' not in host: - host = 'https://' + host + if "://" not in host: + host = "https://" + host o = urllib.parse.urlparse(host) # remove trailing slash - path = o.path.rstrip('/') + path = o.path.rstrip("/") # remove port if 443 netloc = o.netloc if o.port == 443: - netloc = netloc.split(':')[0] + netloc = netloc.split(":")[0] return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment)) class _BaseClient: - def __init__(self, - debug_truncate_bytes: int = None, - retry_timeout_seconds: int = None, - user_agent_base: str = None, - header_factory: Callable[[], dict] = None, - max_connection_pools: int = None, - max_connections_per_pool: int = None, - pool_block: bool = True, - http_timeout_seconds: float = None, - extra_error_customizers: List[_ErrorCustomizer] = None, - debug_headers: bool = False, - clock: Clock = None, - streaming_buffer_size: int = 1024 * 1024): # 1MB + def __init__( + self, + debug_truncate_bytes: int = None, + retry_timeout_seconds: int = None, + user_agent_base: str = None, + header_factory: Callable[[], dict] = None, + max_connection_pools: int = None, + max_connections_per_pool: int = None, + pool_block: bool = True, + http_timeout_seconds: float = None, + extra_error_customizers: List[_ErrorCustomizer] = None, + debug_headers: bool = False, + clock: Clock = None, + streaming_buffer_size: int = 1024 * 1024, + ): # 1MB """ :param debug_truncate_bytes: :param retry_timeout_seconds: @@ -87,9 +89,11 @@ def __init__(self, # We don't use `max_retries` from HTTPAdapter to align with a more production-ready # retry strategy established in the Databricks SDK for Go. See _is_retryable and # @retried for more details. - http_adapter = requests.adapters.HTTPAdapter(pool_connections=max_connections_per_pool or 20, - pool_maxsize=max_connection_pools or 20, - pool_block=pool_block) + http_adapter = requests.adapters.HTTPAdapter( + pool_connections=max_connections_per_pool or 20, + pool_maxsize=max_connection_pools or 20, + pool_block=pool_block, + ) self._session.mount("https://", http_adapter) # Default to 60 seconds @@ -110,7 +114,7 @@ def _fix_query_string(query: Optional[dict] = None) -> Optional[dict]: # See: https://github.com/databricks/databricks-sdk-py/issues/142 if query is None: return None - with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()} + with_fixed_bools = {k: v if type(v) != bool else ("true" if v else "false") for k, v in query.items()} # Query parameters may be nested, e.g. # {'filter_by': {'user_ids': [123, 456]}} @@ -140,30 +144,34 @@ def _is_seekable_stream(data) -> bool: return False return data.seekable() - def do(self, - method: str, - url: str, - query: dict = None, - headers: dict = None, - body: dict = None, - raw: bool = False, - files=None, - data=None, - auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, - response_headers: List[str] = None) -> Union[dict, list, BinaryIO]: + def do( + self, + method: str, + url: str, + query: dict = None, + headers: dict = None, + body: dict = None, + raw: bool = False, + files=None, + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, + response_headers: List[str] = None, + ) -> Union[dict, list, BinaryIO]: if headers is None: headers = {} - headers['User-Agent'] = self._user_agent_base + headers["User-Agent"] = self._user_agent_base # Wrap strings and bytes in a seekable stream so that we can rewind them. 
if isinstance(data, (str, bytes)): - data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data) + data = io.BytesIO(data.encode("utf-8") if isinstance(data, str) else data) if not data: # The request is not a stream. - call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), - is_retryable=self._is_retryable, - clock=self._clock)(self._perform) + call = retried( + timeout=timedelta(seconds=self._retry_timeout_seconds), + is_retryable=self._is_retryable, + clock=self._clock, + )(self._perform) elif self._is_seekable_stream(data): # Keep track of the initial position of the stream so that we can rewind to it # if we need to retry the request. @@ -173,25 +181,29 @@ def rewind(): logger.debug(f"Rewinding input data to offset {initial_data_position} before retry") data.seek(initial_data_position) - call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), - is_retryable=self._is_retryable, - clock=self._clock, - before_retry=rewind)(self._perform) + call = retried( + timeout=timedelta(seconds=self._retry_timeout_seconds), + is_retryable=self._is_retryable, + clock=self._clock, + before_retry=rewind, + )(self._perform) else: # Do not retry if the stream is not seekable. This is necessary to avoid bugs # where the retry doesn't re-read already read data from the stream. logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}") call = self._perform - response = call(method, - url, - query=query, - headers=headers, - body=body, - raw=raw, - files=files, - data=data, - auth=auth) + response = call( + method, + url, + query=query, + headers=headers, + body=body, + raw=raw, + files=files, + data=data, + auth=auth, + ) resp = dict() for header in response_headers if response_headers else []: @@ -220,6 +232,7 @@ def _is_retryable(err: BaseException) -> Optional[str]: # and Databricks SDK for Go retries # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go) from urllib3.exceptions import ProxyError + if isinstance(err, ProxyError): err = err.original_error if isinstance(err, requests.ConnectionError): @@ -230,48 +243,55 @@ def _is_retryable(err: BaseException) -> Optional[str]: # # return a simple string for debug log readability, as `raise TimeoutError(...) from err` # will bubble up the original exception in case we reach max retries. - return f'cannot connect' + return f"cannot connect" if isinstance(err, requests.Timeout): # corresponds to `TLS handshake timeout` and `i/o timeout` in Go. # # return a simple string for debug log readability, as `raise TimeoutError(...) from err` # will bubble up the original exception in case we reach max retries. 
- return f'timeout' + return f"timeout" if isinstance(err, DatabricksError): message = str(err) transient_error_string_matches = [ "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException", - "does not have any associated worker environments", "There is no worker environment with id", - "Unknown worker environment", "ClusterNotReadyException", "Unexpected error", + "does not have any associated worker environments", + "There is no worker environment with id", + "Unknown worker environment", + "ClusterNotReadyException", + "Unexpected error", "Please try again later or try a faster operation.", "RPC token bucket limit has been exceeded", ] for substring in transient_error_string_matches: if substring not in message: continue - return f'matched {substring}' + return f"matched {substring}" return None - def _perform(self, - method: str, - url: str, - query: dict = None, - headers: dict = None, - body: dict = None, - raw: bool = False, - files=None, - data=None, - auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None): - response = self._session.request(method, - url, - params=self._fix_query_string(query), - json=body, - headers=headers, - files=files, - data=data, - auth=auth, - stream=raw, - timeout=self._http_timeout_seconds) + def _perform( + self, + method: str, + url: str, + query: dict = None, + headers: dict = None, + body: dict = None, + raw: bool = False, + files=None, + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, + ): + response = self._session.request( + method, + url, + params=self._fix_query_string(query), + json=body, + headers=headers, + files=files, + data=data, + auth=auth, + stream=raw, + timeout=self._http_timeout_seconds, + ) self._record_request_log(response, raw=raw or data is not None or files is not None) error = self._error_parser.get_api_error(response) if error is not None: @@ -312,7 +332,7 @@ def flush(self) -> int: def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None): self._response = response - self._buffer = b'' + self._buffer = b"" self._content = None self._chunk_size = chunk_size @@ -338,14 +358,14 @@ def isatty(self) -> bool: def read(self, n: int = -1) -> bytes: """ - Read up to n bytes from the response stream. If n is negative, read - until the end of the stream. + Read up to n bytes from the response stream. If n is negative, read + until the end of the stream. 
""" self._open() read_everything = n < 0 remaining_bytes = n - res = b'' + res = b"" while remaining_bytes > 0 or read_everything: if len(self._buffer) == 0: try: @@ -395,8 +415,12 @@ def __next__(self) -> bytes: def __iter__(self) -> Iterator[bytes]: return self._content - def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None], - traceback: Union[TracebackType, None]) -> None: + def __exit__( + self, + t: Union[Type[BaseException], None], + value: Union[BaseException, None], + traceback: Union[TracebackType, None], + ) -> None: self._content = None - self._buffer = b'' + self._buffer = b"" self.close() diff --git a/databricks/sdk/_property.py b/databricks/sdk/_property.py index 7e04ca97e..6dda68896 100644 --- a/databricks/sdk/_property.py +++ b/databricks/sdk/_property.py @@ -16,8 +16,9 @@ def __set_name__(self, owner, name): if self.attrname is None: self.attrname = name elif name != self.attrname: - raise TypeError("Cannot assign the same cached_property to two different names " - f"({self.attrname!r} and {name!r}).") + raise TypeError( + "Cannot assign the same cached_property to two different names " f"({self.attrname!r} and {name!r})." + ) def __get__(self, instance, owner=None): if instance is None: @@ -26,9 +27,11 @@ def __get__(self, instance, owner=None): raise TypeError("Cannot use cached_property instance without calling __set_name__ on it.") try: cache = instance.__dict__ - except AttributeError: # not all objects have __dict__ (e.g. class defines slots) - msg = (f"No '__dict__' attribute on {type(instance).__name__!r} " - f"instance to cache {self.attrname!r} property.") + except AttributeError: # not all objects have __dict__ (e.g. class defines slots) + msg = ( + f"No '__dict__' attribute on {type(instance).__name__!r} " + f"instance to cache {self.attrname!r} property." + ) raise TypeError(msg) from None val = cache.get(self.attrname, _NOT_FOUND) if val is _NOT_FOUND: @@ -36,7 +39,9 @@ def __get__(self, instance, owner=None): try: cache[self.attrname] = val except TypeError: - msg = (f"The '__dict__' attribute on {type(instance).__name__!r} instance " - f"does not support item assignment for caching {self.attrname!r} property.") + msg = ( + f"The '__dict__' attribute on {type(instance).__name__!r} instance " + f"does not support item assignment for caching {self.attrname!r} property." + ) raise TypeError(msg) from None return val diff --git a/databricks/sdk/_widgets/__init__.py b/databricks/sdk/_widgets/__init__.py index 0cd033a55..3f9c4eefc 100644 --- a/databricks/sdk/_widgets/__init__.py +++ b/databricks/sdk/_widgets/__init__.py @@ -43,7 +43,17 @@ def _remove_all(self): # Detect if we are in an interactive notebook by iterating over the mro of the current ipython instance, # to find ZMQInteractiveShell (jupyter). When used from REPL or file, this check will fail, since the # mro only contains TerminalInteractiveShell. - if len(list(filter(lambda i: i.__name__ == 'ZMQInteractiveShell', get_ipython().__class__.__mro__))) == 0: + if ( + len( + list( + filter( + lambda i: i.__name__ == "ZMQInteractiveShell", + get_ipython().__class__.__mro__, + ) + ) + ) + == 0 + ): logging.debug("Not in an interactive notebook. 
Skipping ipywidgets implementation for dbutils.") raise EnvironmentError("Not in an interactive notebook.") @@ -61,7 +71,8 @@ def _remove_all(self): warnings.warn( "\nTo use databricks widgets interactively in your notebook, please install databricks sdk using:\n" "\tpip install 'databricks-sdk[notebook]'\n" - "Falling back to default_value_only implementation for databricks widgets.") + "Falling back to default_value_only implementation for databricks widgets." + ) logging.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.") raise e diff --git a/databricks/sdk/_widgets/default_widgets_utils.py b/databricks/sdk/_widgets/default_widgets_utils.py index 9b61a75f6..1775c6986 100644 --- a/databricks/sdk/_widgets/default_widgets_utils.py +++ b/databricks/sdk/_widgets/default_widgets_utils.py @@ -11,25 +11,31 @@ def __init__(self) -> None: def text(self, name: str, defaultValue: str, label: typing.Optional[str] = None): self._widgets[name] = defaultValue - def dropdown(self, - name: str, - defaultValue: str, - choices: typing.List[str], - label: typing.Optional[str] = None): + def dropdown( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): self._widgets[name] = defaultValue - def combobox(self, - name: str, - defaultValue: str, - choices: typing.List[str], - label: typing.Optional[str] = None): + def combobox( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): self._widgets[name] = defaultValue - def multiselect(self, - name: str, - defaultValue: str, - choices: typing.List[str], - label: typing.Optional[str] = None): + def multiselect( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): self._widgets[name] = defaultValue def _get(self, name: str) -> str: diff --git a/databricks/sdk/_widgets/ipywidgets_utils.py b/databricks/sdk/_widgets/ipywidgets_utils.py index 6f27df438..6e002562e 100644 --- a/databricks/sdk/_widgets/ipywidgets_utils.py +++ b/databricks/sdk/_widgets/ipywidgets_utils.py @@ -28,7 +28,7 @@ def value(self): if type(value) == str or value is None: return value if type(value) == list or type(value) == tuple: - return ','.join(value) + return ",".join(value) raise ValueError("The returned value has invalid type (" + type(value) + ").") @@ -38,7 +38,12 @@ class IPyWidgetUtil(WidgetUtils): def __init__(self) -> None: self._widgets: typing.Dict[str, DbUtilsWidget] = {} - def _register(self, name: str, widget: ValueWidget, label: typing.Optional[str] = None): + def _register( + self, + name: str, + widget: ValueWidget, + label: typing.Optional[str] = None, + ): label = label if label is not None else name w = DbUtilsWidget(label, widget) @@ -51,29 +56,47 @@ def _register(self, name: str, widget: ValueWidget, label: typing.Optional[str] def text(self, name: str, defaultValue: str, label: typing.Optional[str] = None): self._register(name, widget_string.Text(defaultValue), label) - def dropdown(self, - name: str, - defaultValue: str, - choices: typing.List[str], - label: typing.Optional[str] = None): - self._register(name, widget_selection.Dropdown(value=defaultValue, options=choices), label) - - def combobox(self, - name: str, - defaultValue: str, - choices: typing.List[str], - label: typing.Optional[str] = None): - self._register(name, widget_string.Combobox(value=defaultValue, options=choices), label) - - def multiselect(self, - name: str, - defaultValue: str, - choices: typing.List[str], 
- label: typing.Optional[str] = None): + def dropdown( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): self._register( name, - widget_selection.SelectMultiple(value=(defaultValue, ), - options=[("__EMPTY__", ""), *list(zip(choices, choices))]), label) + widget_selection.Dropdown(value=defaultValue, options=choices), + label, + ) + + def combobox( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): + self._register( + name, + widget_string.Combobox(value=defaultValue, options=choices), + label, + ) + + def multiselect( + self, + name: str, + defaultValue: str, + choices: typing.List[str], + label: typing.Optional[str] = None, + ): + self._register( + name, + widget_selection.SelectMultiple( + value=(defaultValue,), + options=[("__EMPTY__", ""), *list(zip(choices, choices))], + ), + label, + ) def _get(self, name: str) -> str: return self._widgets[name].value diff --git a/databricks/sdk/azure.py b/databricks/sdk/azure.py index 372669d61..9bb000d76 100644 --- a/databricks/sdk/azure.py +++ b/databricks/sdk/azure.py @@ -4,14 +4,14 @@ from .service.provisioning import Workspace -def add_workspace_id_header(cfg: 'Config', headers: Dict[str, str]): +def add_workspace_id_header(cfg: "Config", headers: Dict[str, str]): if cfg.azure_workspace_resource_id: headers["X-Databricks-Azure-Workspace-Resource-Id"] = cfg.azure_workspace_resource_id -def add_sp_management_token(token_source: 'TokenSource', headers: Dict[str, str]): +def add_sp_management_token(token_source: "TokenSource", headers: Dict[str, str]): mgmt_token = token_source.token() - headers['X-Databricks-Azure-SP-Management-Token'] = mgmt_token.access_token + headers["X-Databricks-Azure-SP-Management-Token"] = mgmt_token.access_token def get_azure_resource_id(workspace: Workspace): @@ -22,6 +22,8 @@ def get_azure_resource_id(workspace: Workspace): """ if workspace.azure_workspace_info is None: return None - return (f'/subscriptions/{workspace.azure_workspace_info.subscription_id}' - f'/resourceGroups/{workspace.azure_workspace_info.resource_group}' - f'/providers/Microsoft.Databricks/workspaces/{workspace.workspace_name}') + return ( + f"/subscriptions/{workspace.azure_workspace_info.subscription_id}" + f"/resourceGroups/{workspace.azure_workspace_info.resource_group}" + f"/providers/Microsoft.Databricks/workspaces/{workspace.workspace_name}" + ) diff --git a/databricks/sdk/casing.py b/databricks/sdk/casing.py index 02b5a030f..5e0af17b4 100644 --- a/databricks/sdk/casing.py +++ b/databricks/sdk/casing.py @@ -8,22 +8,22 @@ def __init__(self, raw_name: str): for ch in raw_name: if ch.isupper(): if segment: - self._segments.append(''.join(segment)) + self._segments.append("".join(segment)) segment = [ch.lower()] elif ch.islower(): segment.append(ch) else: if segment: - self._segments.append(''.join(segment)) + self._segments.append("".join(segment)) segment = [] if segment: - self._segments.append(''.join(segment)) + self._segments.append("".join(segment)) def to_snake_case(self) -> str: - return '_'.join(self._segments) + return "_".join(self._segments) def to_header_case(self) -> str: - return '-'.join([s.capitalize() for s in self._segments]) + return "-".join([s.capitalize() for s in self._segments]) class Casing(object): diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index 490c6ba4e..3334fa94f 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -19,11 +19,11 @@ 
get_azure_entra_id_workspace_endpoints, get_workspace_endpoints) -logger = logging.getLogger('databricks.sdk') +logger = logging.getLogger("databricks.sdk") class ConfigAttribute: - """ Configuration attribute metadata and descriptor protocols. """ + """Configuration attribute metadata and descriptor protocols.""" # name and transform are discovered from Config.__new__ name: str = None @@ -34,12 +34,12 @@ def __init__(self, env: str = None, auth: str = None, sensitive: bool = False): self.auth = auth self.sensitive = sensitive - def __get__(self, cfg: 'Config', owner): + def __get__(self, cfg: "Config", owner): if not cfg: return None return cfg._inner.get(self.name, None) - def __set__(self, cfg: 'Config', value: any): + def __set__(self, cfg: "Config", value: any): cfg._inner[self.name] = self.transform(value) def __repr__(self) -> str: @@ -57,71 +57,77 @@ def with_user_agent_extra(key: str, value: str): class Config: - host: str = ConfigAttribute(env='DATABRICKS_HOST') - account_id: str = ConfigAttribute(env='DATABRICKS_ACCOUNT_ID') - token: str = ConfigAttribute(env='DATABRICKS_TOKEN', auth='pat', sensitive=True) - username: str = ConfigAttribute(env='DATABRICKS_USERNAME', auth='basic') - password: str = ConfigAttribute(env='DATABRICKS_PASSWORD', auth='basic', sensitive=True) - client_id: str = ConfigAttribute(env='DATABRICKS_CLIENT_ID', auth='oauth') - client_secret: str = ConfigAttribute(env='DATABRICKS_CLIENT_SECRET', auth='oauth', sensitive=True) - profile: str = ConfigAttribute(env='DATABRICKS_CONFIG_PROFILE') - config_file: str = ConfigAttribute(env='DATABRICKS_CONFIG_FILE') - google_service_account: str = ConfigAttribute(env='DATABRICKS_GOOGLE_SERVICE_ACCOUNT', auth='google') - google_credentials: str = ConfigAttribute(env='GOOGLE_CREDENTIALS', auth='google', sensitive=True) - azure_workspace_resource_id: str = ConfigAttribute(env='DATABRICKS_AZURE_RESOURCE_ID', auth='azure') - azure_use_msi: bool = ConfigAttribute(env='ARM_USE_MSI', auth='azure') - azure_client_secret: str = ConfigAttribute(env='ARM_CLIENT_SECRET', auth='azure', sensitive=True) - azure_client_id: str = ConfigAttribute(env='ARM_CLIENT_ID', auth='azure') - azure_tenant_id: str = ConfigAttribute(env='ARM_TENANT_ID', auth='azure') - azure_environment: str = ConfigAttribute(env='ARM_ENVIRONMENT') - databricks_cli_path: str = ConfigAttribute(env='DATABRICKS_CLI_PATH') - auth_type: str = ConfigAttribute(env='DATABRICKS_AUTH_TYPE') - cluster_id: str = ConfigAttribute(env='DATABRICKS_CLUSTER_ID') - warehouse_id: str = ConfigAttribute(env='DATABRICKS_WAREHOUSE_ID') - serverless_compute_id: str = ConfigAttribute(env='DATABRICKS_SERVERLESS_COMPUTE_ID') + host: str = ConfigAttribute(env="DATABRICKS_HOST") + account_id: str = ConfigAttribute(env="DATABRICKS_ACCOUNT_ID") + token: str = ConfigAttribute(env="DATABRICKS_TOKEN", auth="pat", sensitive=True) + username: str = ConfigAttribute(env="DATABRICKS_USERNAME", auth="basic") + password: str = ConfigAttribute(env="DATABRICKS_PASSWORD", auth="basic", sensitive=True) + client_id: str = ConfigAttribute(env="DATABRICKS_CLIENT_ID", auth="oauth") + client_secret: str = ConfigAttribute(env="DATABRICKS_CLIENT_SECRET", auth="oauth", sensitive=True) + profile: str = ConfigAttribute(env="DATABRICKS_CONFIG_PROFILE") + config_file: str = ConfigAttribute(env="DATABRICKS_CONFIG_FILE") + google_service_account: str = ConfigAttribute(env="DATABRICKS_GOOGLE_SERVICE_ACCOUNT", auth="google") + google_credentials: str = ConfigAttribute(env="GOOGLE_CREDENTIALS", auth="google", sensitive=True) + 
azure_workspace_resource_id: str = ConfigAttribute(env="DATABRICKS_AZURE_RESOURCE_ID", auth="azure") + azure_use_msi: bool = ConfigAttribute(env="ARM_USE_MSI", auth="azure") + azure_client_secret: str = ConfigAttribute(env="ARM_CLIENT_SECRET", auth="azure", sensitive=True) + azure_client_id: str = ConfigAttribute(env="ARM_CLIENT_ID", auth="azure") + azure_tenant_id: str = ConfigAttribute(env="ARM_TENANT_ID", auth="azure") + azure_environment: str = ConfigAttribute(env="ARM_ENVIRONMENT") + databricks_cli_path: str = ConfigAttribute(env="DATABRICKS_CLI_PATH") + auth_type: str = ConfigAttribute(env="DATABRICKS_AUTH_TYPE") + cluster_id: str = ConfigAttribute(env="DATABRICKS_CLUSTER_ID") + warehouse_id: str = ConfigAttribute(env="DATABRICKS_WAREHOUSE_ID") + serverless_compute_id: str = ConfigAttribute(env="DATABRICKS_SERVERLESS_COMPUTE_ID") skip_verify: bool = ConfigAttribute() http_timeout_seconds: float = ConfigAttribute() - debug_truncate_bytes: int = ConfigAttribute(env='DATABRICKS_DEBUG_TRUNCATE_BYTES') - debug_headers: bool = ConfigAttribute(env='DATABRICKS_DEBUG_HEADERS') - rate_limit: int = ConfigAttribute(env='DATABRICKS_RATE_LIMIT') + debug_truncate_bytes: int = ConfigAttribute(env="DATABRICKS_DEBUG_TRUNCATE_BYTES") + debug_headers: bool = ConfigAttribute(env="DATABRICKS_DEBUG_HEADERS") + rate_limit: int = ConfigAttribute(env="DATABRICKS_RATE_LIMIT") retry_timeout_seconds: int = ConfigAttribute() - metadata_service_url = ConfigAttribute(env='DATABRICKS_METADATA_SERVICE_URL', - auth='metadata-service', - sensitive=True) + metadata_service_url = ConfigAttribute( + env="DATABRICKS_METADATA_SERVICE_URL", + auth="metadata-service", + sensitive=True, + ) max_connection_pools: int = ConfigAttribute() max_connections_per_pool: int = ConfigAttribute() databricks_environment: Optional[DatabricksEnvironment] = None - enable_experimental_files_api_client: bool = ConfigAttribute( - env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT') + enable_experimental_files_api_client: bool = ConfigAttribute(env="DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT") files_api_client_download_max_total_recovers = None files_api_client_download_max_total_recovers_without_progressing = 1 def __init__( - self, - *, - # Deprecated. Use credentials_strategy instead. - credentials_provider: Optional[CredentialsStrategy] = None, - credentials_strategy: Optional[CredentialsStrategy] = None, - product=None, - product_version=None, - clock: Optional[Clock] = None, - **kwargs): + self, + *, + # Deprecated. Use credentials_strategy instead. + credentials_provider: Optional[CredentialsStrategy] = None, + credentials_strategy: Optional[CredentialsStrategy] = None, + product=None, + product_version=None, + clock: Optional[Clock] = None, + **kwargs, + ): self._header_factory = None self._inner = {} self._user_agent_other_info = [] if credentials_strategy and credentials_provider: - raise ValueError( - "When providing `credentials_strategy` field, `credential_provider` cannot be specified.") + raise ValueError("When providing `credentials_strategy` field, `credential_provider` cannot be specified.") if credentials_provider: - logger.warning( - "parameter 'credentials_provider' is deprecated. Use 'credentials_strategy' instead.") + logger.warning("parameter 'credentials_provider' is deprecated. 
Use 'credentials_strategy' instead.") self._credentials_strategy = next( - s for s in [credentials_strategy, credentials_provider, - DefaultCredentials()] if s is not None) - if 'databricks_environment' in kwargs: - self.databricks_environment = kwargs['databricks_environment'] - del kwargs['databricks_environment'] + s + for s in [ + credentials_strategy, + credentials_provider, + DefaultCredentials(), + ] + if s is not None + ) + if "databricks_environment" in kwargs: + self.databricks_environment = kwargs["databricks_environment"] + del kwargs["databricks_environment"] self._clock = clock if clock is not None else RealClock() try: self._set_inner_config(kwargs) @@ -145,15 +151,15 @@ def wrap_debug_info(self, message: str) -> str: return message @staticmethod - def parse_dsn(dsn: str) -> 'Config': + def parse_dsn(dsn: str) -> "Config": uri = urllib.parse.urlparse(dsn) - if uri.scheme != 'databricks': - raise ValueError(f'Expected databricks:// scheme, got {uri.scheme}://') - kwargs = {'host': f'https://{uri.hostname}'} + if uri.scheme != "databricks": + raise ValueError(f"Expected databricks:// scheme, got {uri.scheme}://") + kwargs = {"host": f"https://{uri.hostname}"} if uri.username: - kwargs['username'] = uri.username + kwargs["username"] = uri.username if uri.password: - kwargs['password'] = uri.password + kwargs["password"] = uri.password query = dict(urllib.parse.parse_qsl(uri.query)) for attr in Config.attributes(): if attr.name not in query: @@ -162,7 +168,7 @@ def parse_dsn(dsn: str) -> 'Config': return Config(**kwargs) def authenticate(self) -> Dict[str, str]: - """ Returns a list of fresh authentication headers """ + """Returns a list of fresh authentication headers""" return self._header_factory() def as_dict(self) -> dict: @@ -174,9 +180,9 @@ def _get_azure_environment_name(self) -> str: env = self.azure_environment.upper() # Compatibility with older versions of the SDK that allowed users to specify AzurePublicCloud or AzureChinaCloud if env.startswith("AZURE"): - env = env[len("AZURE"):] + env = env[len("AZURE") :] if env.endswith("CLOUD"): - env = env[:-len("CLOUD")] + env = env[: -len("CLOUD")] return env @property @@ -241,19 +247,21 @@ def is_any_auth_configured(self) -> bool: @property def user_agent(self): - """ Returns User-Agent header used by this SDK """ + """Returns User-Agent header used by this SDK""" # global user agent includes SDK version, product name & version, platform info, # and global extra info. Config can have specific extra info associated with it, # such as an override product, auth type, and other user-defined information. 
- return useragent.to_string(self._product_info, - [("auth", self.auth_type)] + self._user_agent_other_info) + return useragent.to_string( + self._product_info, + [("auth", self.auth_type)] + self._user_agent_other_info, + ) @property def _upstream_user_agent(self) -> str: return " ".join(f"{k}/{v}" for k, v in useragent._get_upstream_user_agent_info()) - def with_user_agent_extra(self, key: str, value: str) -> 'Config': + def with_user_agent_extra(self, key: str, value: str) -> "Config": self._user_agent_other_info.append((key, value)) return self @@ -269,7 +277,7 @@ def oidc_endpoints(self) -> Optional[OidcEndpoints]: return get_workspace_endpoints(self.host) def debug_string(self) -> str: - """ Returns log-friendly representation of configured attributes """ + """Returns log-friendly representation of configured attributes""" buf = [] attrs_used = [] envs_used = [] @@ -279,13 +287,13 @@ def debug_string(self) -> str: value = getattr(self, attr.name) if not value: continue - safe = '***' if attr.sensitive else f'{value}' - attrs_used.append(f'{attr.name}={safe}') + safe = "***" if attr.sensitive else f"{value}" + attrs_used.append(f"{attr.name}={safe}") if attrs_used: buf.append(f'Config: {", ".join(attrs_used)}') if envs_used: buf.append(f'Env: {", ".join(envs_used)}') - return '. '.join(buf) + return ". ".join(buf) def to_dict(self) -> Dict[str, any]: return self._inner @@ -302,16 +310,16 @@ def sql_http_path(self) -> Optional[str]: if (not self.cluster_id) and (not self.warehouse_id): return None if self.cluster_id and self.warehouse_id: - raise ValueError('cannot have both cluster_id and warehouse_id') + raise ValueError("cannot have both cluster_id and warehouse_id") headers = self.authenticate() - headers['User-Agent'] = f'{self.user_agent} sdk-feature/sql-http-path' + headers["User-Agent"] = f"{self.user_agent} sdk-feature/sql-http-path" if self.cluster_id: response = requests.get(f"{self.host}/api/2.0/preview/scim/v2/Me", headers=headers) # get workspace ID from the response header - workspace_id = response.headers.get('x-databricks-org-id') - return f'sql/protocolv1/o/{workspace_id}/{self.cluster_id}' + workspace_id = response.headers.get("x-databricks-org-id") + return f"sql/protocolv1/o/{workspace_id}/{self.cluster_id}" if self.warehouse_id: - return f'/sql/1.0/warehouses/{self.warehouse_id}' + return f"/sql/1.0/warehouses/{self.warehouse_id}" @property def clock(self) -> Clock: @@ -319,17 +327,18 @@ def clock(self) -> Clock: @classmethod def attributes(cls) -> Iterable[ConfigAttribute]: - """ Returns a list of Databricks SDK configuration metadata """ - if hasattr(cls, '_attributes'): + """Returns a list of Databricks SDK configuration metadata""" + if hasattr(cls, "_attributes"): return cls._attributes if sys.version_info[1] >= 10: import inspect + anno = inspect.get_annotations(cls) else: # Python 3.7 compatibility: getting type hints require extra hop, as described in # "Accessing The Annotations Dict Of An Object In Python 3.9 And Older" section of # https://docs.python.org/3/howto/annotations.html - anno = cls.__dict__['__annotations__'] + anno = cls.__dict__["__annotations__"] attrs = [] for name, v in cls.__dict__.items(): if type(v) != ConfigAttribute: @@ -351,26 +360,25 @@ def load_azure_tenant_id(self): If the tenant ID is already set, this method does nothing.""" if not self.is_azure or self.azure_tenant_id is not None or self.host is None: return - login_url = f'{self.host}/aad/auth' - logger.debug(f'Loading tenant ID from {login_url}') + login_url = 
f"{self.host}/aad/auth" + logger.debug(f"Loading tenant ID from {login_url}") resp = requests.get(login_url, allow_redirects=False) if resp.status_code // 100 != 3: - logger.debug( - f'Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}') + logger.debug(f"Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}") return - entra_id_endpoint = resp.headers.get('Location') + entra_id_endpoint = resp.headers.get("Location") if entra_id_endpoint is None: - logger.debug(f'No Location header in response from {login_url}') + logger.debug(f"No Location header in response from {login_url}") return # The Location header has the following form: https://login.microsoftonline.com//oauth2/authorize?... # The domain may change depending on the Azure cloud (e.g. login.microsoftonline.us for US Government cloud). url = urllib.parse.urlparse(entra_id_endpoint) - path_segments = url.path.split('/') + path_segments = url.path.split("/") if len(path_segments) < 2: - logger.debug(f'Invalid path in Location header: {url.path}') + logger.debug(f"Invalid path in Location header: {url.path}") return self.azure_tenant_id = path_segments[1] - logger.debug(f'Loaded tenant ID: {self.azure_tenant_id}') + logger.debug(f"Loaded tenant ID: {self.azure_tenant_id}") def _set_inner_config(self, keyword_args: Dict[str, any]): for attr in self.attributes(): @@ -393,11 +401,10 @@ def _load_from_env(self): self.__setattr__(attr.name, value) found = True if found: - logger.debug('Loaded from environment') + logger.debug("Loaded from environment") def _known_file_config_loader(self): - if not self.profile and (self.is_any_auth_configured or self.host - or self.azure_workspace_resource_id): + if not self.profile and (self.is_any_auth_configured or self.host or self.azure_workspace_resource_id): # skip loading configuration file if there's any auth configured # directly as part of the Config() constructor. return @@ -417,15 +424,15 @@ def _known_file_config_loader(self): # from Unified Auth test suite at the moment. Hence, the private variable access. 
# See: https://docs.python.org/3/library/configparser.html#mapping-protocol-access if not has_explicit_profile and not ini_file.defaults(): - logger.debug(f'{config_path} has no DEFAULT profile configured') + logger.debug(f"{config_path} has no DEFAULT profile configured") return if not has_explicit_profile: profile = "DEFAULT" profiles = ini_file._sections if ini_file.defaults(): - profiles['DEFAULT'] = ini_file.defaults() + profiles["DEFAULT"] = ini_file.defaults() if profile not in profiles: - raise ValueError(f'resolve: {config_path} has no {profile} profile configured') + raise ValueError(f"resolve: {config_path} has no {profile} profile configured") raw_config = profiles[profile] logger.info(f'loading {profile} profile from {config_file}: {", ".join(raw_config.keys())}') for k, v in raw_config.items(): @@ -448,26 +455,29 @@ def _validate(self): # client has auth preference set return names = " and ".join(sorted(auths_used)) - raise ValueError(f'validate: more than one authorization method configured: {names}') + raise ValueError(f"validate: more than one authorization method configured: {names}") def init_auth(self): try: self._header_factory = self._credentials_strategy(self) self.auth_type = self._credentials_strategy.auth_type() if not self._header_factory: - raise ValueError('not configured') + raise ValueError("not configured") except ValueError as e: - raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e + raise ValueError(f"{self._credentials_strategy.auth_type()} auth: {e}") from e def _init_product(self, product, product_version): if product is not None or product_version is not None: default_product, default_version = useragent.product() - self._product_info = (product or default_product, product_version or default_version) + self._product_info = ( + product or default_product, + product_version or default_version, + ) else: self._product_info = None def __repr__(self): - return f'<{self.debug_string()}>' + return f"<{self.debug_string()}>" def copy(self): """Creates a copy of the config object. @@ -480,6 +490,5 @@ def copy(self): return cpy def deep_copy(self): - """Creates a deep copy of the config object. 
- """ + """Creates a deep copy of the config object.""" return copy.deepcopy(self) diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py index eab22cd71..203e84e6c 100644 --- a/databricks/sdk/core.py +++ b/databricks/sdk/core.py @@ -9,9 +9,9 @@ from .errors import DatabricksError, _ErrorCustomizer from .oauth import retrieve_token -__all__ = ['Config', 'DatabricksError'] +__all__ = ["Config", "DatabricksError"] -logger = logging.getLogger('databricks.sdk') +logger = logging.getLogger("databricks.sdk") URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded" JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer" @@ -22,16 +22,18 @@ class ApiClient: def __init__(self, cfg: Config): self._cfg = cfg - self._api_client = _BaseClient(debug_truncate_bytes=cfg.debug_truncate_bytes, - retry_timeout_seconds=cfg.retry_timeout_seconds, - user_agent_base=cfg.user_agent, - header_factory=cfg.authenticate, - max_connection_pools=cfg.max_connection_pools, - max_connections_per_pool=cfg.max_connections_per_pool, - pool_block=True, - http_timeout_seconds=cfg.http_timeout_seconds, - extra_error_customizers=[_AddDebugErrorCustomizer(cfg)], - clock=cfg.clock) + self._api_client = _BaseClient( + debug_truncate_bytes=cfg.debug_truncate_bytes, + retry_timeout_seconds=cfg.retry_timeout_seconds, + user_agent_base=cfg.user_agent, + header_factory=cfg.authenticate, + max_connection_pools=cfg.max_connection_pools, + max_connections_per_pool=cfg.max_connections_per_pool, + pool_block=True, + http_timeout_seconds=cfg.http_timeout_seconds, + extra_error_customizers=[_AddDebugErrorCustomizer(cfg)], + clock=cfg.clock, + ) @property def account_id(self) -> str: @@ -46,44 +48,52 @@ def get_oauth_token(self, auth_details: str) -> Token: self._cfg.authenticate() original_token = self._cfg.oauth_token() headers = {"Content-Type": URL_ENCODED_CONTENT_TYPE} - params = urlencode({ - "grant_type": JWT_BEARER_GRANT_TYPE, - "authorization_details": auth_details, - "assertion": original_token.access_token - }) - return retrieve_token(client_id=self._cfg.client_id, - client_secret=self._cfg.client_secret, - token_url=self._cfg.host + OIDC_TOKEN_PATH, - params=params, - headers=headers) + params = urlencode( + { + "grant_type": JWT_BEARER_GRANT_TYPE, + "authorization_details": auth_details, + "assertion": original_token.access_token, + } + ) + return retrieve_token( + client_id=self._cfg.client_id, + client_secret=self._cfg.client_secret, + token_url=self._cfg.host + OIDC_TOKEN_PATH, + params=params, + headers=headers, + ) - def do(self, - method: str, - path: str = None, - url: str = None, - query: dict = None, - headers: dict = None, - body: dict = None, - raw: bool = False, - files=None, - data=None, - auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, - response_headers: List[str] = None) -> Union[dict, list, BinaryIO]: + def do( + self, + method: str, + path: str = None, + url: str = None, + query: dict = None, + headers: dict = None, + body: dict = None, + raw: bool = False, + files=None, + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, + response_headers: List[str] = None, + ) -> Union[dict, list, BinaryIO]: if url is None: # Remove extra `/` from path for Files API # Once we've fixed the OpenAPI spec, we can remove this - path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path) + path = re.sub("^/api/2.0/fs/files//", "/api/2.0/fs/files/", path) url = f"{self._cfg.host}{path}" - return self._api_client.do(method=method, - 
url=url, - query=query, - headers=headers, - body=body, - raw=raw, - files=files, - data=data, - auth=auth, - response_headers=response_headers) + return self._api_client.do( + method=method, + url=url, + query=query, + headers=headers, + body=body, + raw=raw, + files=files, + data=data, + auth=auth, + response_headers=response_headers, + ) class _AddDebugErrorCustomizer(_ErrorCustomizer): @@ -95,5 +105,5 @@ def __init__(self, cfg: Config): def customize_error(self, response: requests.Response, kwargs: dict): if response.status_code in (401, 403): - message = kwargs.get('message', 'request failed') - kwargs['message'] = self._cfg.wrap_debug_info(message) + message = kwargs.get("message", "request failed") + kwargs["message"] = self._cfg.wrap_debug_info(message) diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index 24d01f678..8fb1b45c2 100644 --- a/databricks/sdk/credentials_provider.py +++ b/databricks/sdk/credentials_provider.py @@ -26,13 +26,17 @@ CredentialsProvider = Callable[[], Dict[str, str]] -logger = logging.getLogger('databricks.sdk') +logger = logging.getLogger("databricks.sdk") class OAuthCredentialsProvider: - """ OAuthCredentialsProvider is a type of CredentialsProvider which exposes OAuth tokens. """ + """OAuthCredentialsProvider is a type of CredentialsProvider which exposes OAuth tokens.""" - def __init__(self, credentials_provider: CredentialsProvider, token_provider: Callable[[], Token]): + def __init__( + self, + credentials_provider: CredentialsProvider, + token_provider: Callable[[], Token], + ): self._credentials_provider = credentials_provider self._token_provider = token_provider @@ -44,45 +48,49 @@ def oauth_token(self) -> Token: class CredentialsStrategy(abc.ABC): - """ CredentialsProvider is the protocol (call-side interface) - for authenticating requests to Databricks REST APIs""" + """CredentialsProvider is the protocol (call-side interface) + for authenticating requests to Databricks REST APIs""" @abc.abstractmethod - def auth_type(self) -> str: - ... + def auth_type(self) -> str: ... @abc.abstractmethod - def __call__(self, cfg: 'Config') -> CredentialsProvider: - ... + def __call__(self, cfg: "Config") -> CredentialsProvider: ... class OauthCredentialsStrategy(CredentialsStrategy): - """ OauthCredentialsProvider is a CredentialsProvider which + """OauthCredentialsProvider is a CredentialsProvider which supports Oauth tokens""" - def __init__(self, auth_type: str, headers_provider: Callable[['Config'], OAuthCredentialsProvider]): + def __init__( + self, + auth_type: str, + headers_provider: Callable[["Config"], OAuthCredentialsProvider], + ): self._headers_provider = headers_provider self._auth_type = auth_type def auth_type(self) -> str: return self._auth_type - def __call__(self, cfg: 'Config') -> OAuthCredentialsProvider: + def __call__(self, cfg: "Config") -> OAuthCredentialsProvider: return self._headers_provider(cfg) - def oauth_token(self, cfg: 'Config') -> Token: + def oauth_token(self, cfg: "Config") -> Token: return self._headers_provider(cfg).oauth_token() def credentials_strategy(name: str, require: List[str]): - """ Given the function that receives a Config and returns RequestVisitor, + """Given the function that receives a Config and returns RequestVisitor, create CredentialsProvider with a given name and required configuration - attribute names to be present for this function to be called. 
""" + attribute names to be present for this function to be called.""" - def inner(func: Callable[['Config'], CredentialsProvider]) -> CredentialsStrategy: + def inner( + func: Callable[["Config"], CredentialsProvider], + ) -> CredentialsStrategy: @functools.wraps(func) - def wrapper(cfg: 'Config') -> Optional[CredentialsProvider]: + def wrapper(cfg: "Config") -> Optional[CredentialsProvider]: for attr in require: getattr(cfg, attr) if not getattr(cfg, attr): @@ -96,14 +104,16 @@ def wrapper(cfg: 'Config') -> Optional[CredentialsProvider]: def oauth_credentials_strategy(name: str, require: List[str]): - """ Given the function that receives a Config and returns an OauthHeaderFactory, + """Given the function that receives a Config and returns an OauthHeaderFactory, create an OauthCredentialsProvider with a given name and required configuration - attribute names to be present for this function to be called. """ + attribute names to be present for this function to be called.""" - def inner(func: Callable[['Config'], OAuthCredentialsProvider]) -> OauthCredentialsStrategy: + def inner( + func: Callable[["Config"], OAuthCredentialsProvider], + ) -> OauthCredentialsStrategy: @functools.wraps(func) - def wrapper(cfg: 'Config') -> Optional[OAuthCredentialsProvider]: + def wrapper(cfg: "Config") -> Optional[OAuthCredentialsProvider]: for attr in require: if not getattr(cfg, attr): return None @@ -114,11 +124,11 @@ def wrapper(cfg: 'Config') -> Optional[OAuthCredentialsProvider]: return inner -@credentials_strategy('basic', ['host', 'username', 'password']) -def basic_auth(cfg: 'Config') -> CredentialsProvider: - """ Given username and password, add base64-encoded Basic credentials """ - encoded = base64.b64encode(f'{cfg.username}:{cfg.password}'.encode()).decode() - static_credentials = {'Authorization': f'Basic {encoded}'} +@credentials_strategy("basic", ["host", "username", "password"]) +def basic_auth(cfg: "Config") -> CredentialsProvider: + """Given username and password, add base64-encoded Basic credentials""" + encoded = base64.b64encode(f"{cfg.username}:{cfg.password}".encode()).decode() + static_credentials = {"Authorization": f"Basic {encoded}"} def inner() -> Dict[str, str]: return static_credentials @@ -126,10 +136,10 @@ def inner() -> Dict[str, str]: return inner -@credentials_strategy('pat', ['host', 'token']) -def pat_auth(cfg: 'Config') -> CredentialsProvider: - """ Adds Databricks Personal Access Token to every request """ - static_credentials = {'Authorization': f'Bearer {cfg.token}'} +@credentials_strategy("pat", ["host", "token"]) +def pat_auth(cfg: "Config") -> CredentialsProvider: + """Adds Databricks Personal Access Token to every request""" + static_credentials = {"Authorization": f"Bearer {cfg.token}"} def inner() -> Dict[str, str]: return static_credentials @@ -137,9 +147,9 @@ def inner() -> Dict[str, str]: return inner -@credentials_strategy('runtime', []) -def runtime_native_auth(cfg: 'Config') -> Optional[CredentialsProvider]: - if 'DATABRICKS_RUNTIME_VERSION' not in os.environ: +@credentials_strategy("runtime", []) +def runtime_native_auth(cfg: "Config") -> Optional[CredentialsProvider]: + if "DATABRICKS_RUNTIME_VERSION" not in os.environ: return None # This import MUST be after the "DATABRICKS_RUNTIME_VERSION" check @@ -148,36 +158,44 @@ def runtime_native_auth(cfg: 'Config') -> Optional[CredentialsProvider]: from databricks.sdk.runtime import (init_runtime_legacy_auth, init_runtime_native_auth, init_runtime_repl_auth) - for init in [init_runtime_native_auth, 
init_runtime_repl_auth, init_runtime_legacy_auth]: + + for init in [ + init_runtime_native_auth, + init_runtime_repl_auth, + init_runtime_legacy_auth, + ]: if init is None: continue host, inner = init() if host is None: - logger.debug(f'[{init.__name__}] no host detected') + logger.debug(f"[{init.__name__}] no host detected") continue cfg.host = host - logger.debug(f'[{init.__name__}] runtime native auth configured') + logger.debug(f"[{init.__name__}] runtime native auth configured") return inner return None -@oauth_credentials_strategy('oauth-m2m', ['host', 'client_id', 'client_secret']) -def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]: - """ Adds refreshed Databricks machine-to-machine OAuth Bearer token to every request, - if /oidc/.well-known/oauth-authorization-server is available on the given host. """ +@oauth_credentials_strategy("oauth-m2m", ["host", "client_id", "client_secret"]) +def oauth_service_principal(cfg: "Config") -> Optional[CredentialsProvider]: + """Adds refreshed Databricks machine-to-machine OAuth Bearer token to every request, + if /oidc/.well-known/oauth-authorization-server is available on the given host. + """ oidc = cfg.oidc_endpoints if oidc is None: return None - token_source = ClientCredentials(client_id=cfg.client_id, - client_secret=cfg.client_secret, - token_url=oidc.token_endpoint, - scopes=["all-apis"], - use_header=True) + token_source = ClientCredentials( + client_id=cfg.client_id, + client_secret=cfg.client_secret, + token_url=oidc.token_endpoint, + scopes=["all-apis"], + use_header=True, + ) def inner() -> Dict[str, str]: token = token_source.token() - return {'Authorization': f'{token.token_type} {token.access_token}'} + return {"Authorization": f"{token.token_type} {token.access_token}"} def token() -> Token: return token_source.token() @@ -185,9 +203,9 @@ def token() -> Token: return OAuthCredentialsProvider(inner, token) -@credentials_strategy('external-browser', ['host', 'auth_type']) -def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: - if cfg.auth_type != 'external-browser': +@credentials_strategy("external-browser", ["host", "auth_type"]) +def external_browser(cfg: "Config") -> Optional[CredentialsProvider]: + if cfg.auth_type != "external-browser": return None client_id, client_secret = None, None @@ -198,17 +216,19 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: client_id = cfg.azure_client client_secret = cfg.azure_client_secret if not client_id: - client_id = 'databricks-cli' + client_id = "databricks-cli" # Load cached credentials from disk if they exist. Note that these are # local to the Python SDK and not reused by other SDKs. oidc_endpoints = cfg.oidc_endpoints - redirect_url = 'http://localhost:8020' - token_cache = TokenCache(host=cfg.host, - oidc_endpoints=oidc_endpoints, - client_id=client_id, - client_secret=client_secret, - redirect_url=redirect_url) + redirect_url = "http://localhost:8020" + token_cache = TokenCache( + host=cfg.host, + oidc_endpoints=oidc_endpoints, + client_id=client_id, + client_secret=client_secret, + redirect_url=redirect_url, + ) credentials = token_cache.load() if credentials: try: @@ -219,12 +239,14 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: return credentials(cfg) # TODO: We should ideally use more specific exceptions. except Exception as e: - logger.warning(f'Failed to refresh cached token: {e}. 
Initiating new OAuth login flow') - - oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints, - client_id=client_id, - redirect_url=redirect_url, - client_secret=client_secret) + logger.warning(f"Failed to refresh cached token: {e}. Initiating new OAuth login flow") + + oauth_client = OAuthClient( + oidc_endpoints=oidc_endpoints, + client_id=client_id, + redirect_url=redirect_url, + client_secret=client_secret, + ) consent = oauth_client.initiate_consent() if not consent: return None @@ -234,33 +256,41 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]: return credentials(cfg) -def _ensure_host_present(cfg: 'Config', token_source_for: Callable[[str], TokenSource]): - """ Resolves Azure Databricks workspace URL from ARM Resource ID """ +def _ensure_host_present(cfg: "Config", token_source_for: Callable[[str], TokenSource]): + """Resolves Azure Databricks workspace URL from ARM Resource ID""" if cfg.host: return if not cfg.azure_workspace_resource_id: return arm = cfg.arm_environment.resource_manager_endpoint token = token_source_for(arm).token() - resp = requests.get(f"{arm}{cfg.azure_workspace_resource_id}?api-version=2018-04-01", - headers={"Authorization": f"Bearer {token.access_token}"}) + resp = requests.get( + f"{arm}{cfg.azure_workspace_resource_id}?api-version=2018-04-01", + headers={"Authorization": f"Bearer {token.access_token}"}, + ) if not resp.ok: raise ValueError(f"Cannot resolve Azure Databricks workspace: {resp.content}") cfg.host = f"https://{resp.json()['properties']['workspaceUrl']}" -@oauth_credentials_strategy('azure-client-secret', ['is_azure', 'azure_client_id', 'azure_client_secret']) -def azure_service_principal(cfg: 'Config') -> CredentialsProvider: - """ Adds refreshed Azure Active Directory (AAD) Service Principal OAuth tokens - to every request, while automatically resolving different Azure environment endpoints. """ +@oauth_credentials_strategy( + "azure-client-secret", + ["is_azure", "azure_client_id", "azure_client_secret"], +) +def azure_service_principal(cfg: "Config") -> CredentialsProvider: + """Adds refreshed Azure Active Directory (AAD) Service Principal OAuth tokens + to every request, while automatically resolving different Azure environment endpoints. 
+ """ def token_source_for(resource: str) -> TokenSource: aad_endpoint = cfg.arm_environment.active_directory_endpoint - return ClientCredentials(client_id=cfg.azure_client_id, - client_secret=cfg.azure_client_secret, - token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token", - endpoint_params={"resource": resource}, - use_params=True) + return ClientCredentials( + client_id=cfg.azure_client_id, + client_secret=cfg.azure_client_secret, + token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token", + endpoint_params={"resource": resource}, + use_params=True, + ) _ensure_host_present(cfg, token_source_for) cfg.load_azure_tenant_id() @@ -269,7 +299,9 @@ def token_source_for(resource: str) -> TokenSource: cloud = token_source_for(cfg.arm_environment.service_management_endpoint) def refreshed_headers() -> Dict[str, str]: - headers = {'Authorization': f"Bearer {inner.token().access_token}", } + headers = { + "Authorization": f"Bearer {inner.token().access_token}", + } add_workspace_id_header(cfg, headers) add_sp_management_token(cloud, headers) return headers @@ -280,9 +312,9 @@ def token() -> Token: return OAuthCredentialsProvider(refreshed_headers, token) -@oauth_credentials_strategy('github-oidc-azure', ['host', 'azure_client_id']) -def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]: - if 'ACTIONS_ID_TOKEN_REQUEST_TOKEN' not in os.environ: +@oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"]) +def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]: + if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ: # not in GitHub actions return None @@ -292,7 +324,7 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]: return None # See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers - headers = {'Authorization': f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"} + headers = {"Authorization": f"Bearer {os.environ['ACTIONS_ID_TOKEN_REQUEST_TOKEN']}"} endpoint = f"{os.environ['ACTIONS_ID_TOKEN_REQUEST_URL']}&audience=api://AzureADTokenExchange" response = requests.get(endpoint, headers=headers) if not response.ok: @@ -300,30 +332,34 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]: # get the ID Token with aud=api://AzureADTokenExchange sub=repo:org/repo:environment:name response_json = response.json() - if 'value' not in response_json: + if "value" not in response_json: return None - logger.info("Configured AAD token for GitHub Actions OIDC (%s)", cfg.azure_client_id) + logger.info( + "Configured AAD token for GitHub Actions OIDC (%s)", + cfg.azure_client_id, + ) params = { - 'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer', - 'resource': cfg.effective_azure_login_app_id, - 'client_assertion': response_json['value'], + "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", + "resource": cfg.effective_azure_login_app_id, + "client_assertion": response_json["value"], } aad_endpoint = cfg.arm_environment.active_directory_endpoint if not cfg.azure_tenant_id: # detect Azure AD Tenant ID if it's not specified directly token_endpoint = cfg.oidc_endpoints.token_endpoint - cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, '').split('/')[0] + cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, "").split("/")[0] inner = ClientCredentials( client_id=cfg.azure_client_id, - client_secret="", # we have no (rotatable) secrets in OIDC flow + 
client_secret="", # we have no (rotatable) secrets in OIDC flow token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token", endpoint_params=params, - use_params=True) + use_params=True, + ) def refreshed_headers() -> Dict[str, str]: token = inner.token() - return {'Authorization': f'{token.token_type} {token.access_token}'} + return {"Authorization": f"{token.token_type} {token.access_token}"} def token() -> Token: return inner.token() @@ -331,29 +367,32 @@ def token() -> Token: return OAuthCredentialsProvider(refreshed_headers, token) -GcpScopes = ["https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/compute"] +GcpScopes = [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", +] -@oauth_credentials_strategy('google-credentials', ['host', 'google_credentials']) -def google_credentials(cfg: 'Config') -> Optional[CredentialsProvider]: +@oauth_credentials_strategy("google-credentials", ["host", "google_credentials"]) +def google_credentials(cfg: "Config") -> Optional[CredentialsProvider]: if not cfg.is_gcp: return None # Reads credentials as JSON. Credentials can be either a path to JSON file, or actual JSON string. # Obtain the id token by providing the json file path and target audience. - if (os.path.isfile(cfg.google_credentials)): + if os.path.isfile(cfg.google_credentials): with io.open(cfg.google_credentials, "r", encoding="utf-8") as json_file: account_info = json.load(json_file) else: # If the file doesn't exist, assume that the config is the actual JSON content. account_info = json.loads(cfg.google_credentials) - credentials = service_account.IDTokenCredentials.from_service_account_info(info=account_info, - target_audience=cfg.host) + credentials = service_account.IDTokenCredentials.from_service_account_info( + info=account_info, target_audience=cfg.host + ) request = Request() - gcp_credentials = service_account.Credentials.from_service_account_info(info=account_info, - scopes=GcpScopes) + gcp_credentials = service_account.Credentials.from_service_account_info(info=account_info, scopes=GcpScopes) def token() -> Token: credentials.refresh(request) @@ -361,7 +400,7 @@ def token() -> Token: def refreshed_headers() -> Dict[str, str]: credentials.refresh(request) - headers = {'Authorization': f'Bearer {credentials.token}'} + headers = {"Authorization": f"Bearer {credentials.token}"} if cfg.is_account_client: gcp_credentials.refresh(request) headers["X-Databricks-GCP-SA-Access-Token"] = gcp_credentials.token @@ -370,24 +409,29 @@ def refreshed_headers() -> Dict[str, str]: return OAuthCredentialsProvider(refreshed_headers, token) -@oauth_credentials_strategy('google-id', ['host', 'google_service_account']) -def google_id(cfg: 'Config') -> Optional[CredentialsProvider]: +@oauth_credentials_strategy("google-id", ["host", "google_service_account"]) +def google_id(cfg: "Config") -> Optional[CredentialsProvider]: if not cfg.is_gcp: return None credentials, _project_id = google.auth.default() # Create the impersonated credential. - target_credentials = impersonated_credentials.Credentials(source_credentials=credentials, - target_principal=cfg.google_service_account, - target_scopes=[]) + target_credentials = impersonated_credentials.Credentials( + source_credentials=credentials, + target_principal=cfg.google_service_account, + target_scopes=[], + ) # Set the impersonated credential, target audience and token options. 
- id_creds = impersonated_credentials.IDTokenCredentials(target_credentials, - target_audience=cfg.host, - include_email=True) + id_creds = impersonated_credentials.IDTokenCredentials( + target_credentials, target_audience=cfg.host, include_email=True + ) gcp_impersonated_credentials = impersonated_credentials.Credentials( - source_credentials=credentials, target_principal=cfg.google_service_account, target_scopes=GcpScopes) + source_credentials=credentials, + target_principal=cfg.google_service_account, + target_scopes=GcpScopes, + ) request = Request() @@ -397,7 +441,7 @@ def token() -> Token: def refreshed_headers() -> Dict[str, str]: id_creds.refresh(request) - headers = {'Authorization': f'Bearer {id_creds.token}'} + headers = {"Authorization": f"Bearer {id_creds.token}"} if cfg.is_account_client: gcp_impersonated_credentials.refresh(request) headers["X-Databricks-GCP-SA-Access-Token"] = gcp_impersonated_credentials.token @@ -408,7 +452,13 @@ def refreshed_headers() -> Dict[str, str]: class CliTokenSource(Refreshable): - def __init__(self, cmd: List[str], token_type_field: str, access_token_field: str, expiry_field: str): + def __init__( + self, + cmd: List[str], + token_type_field: str, + access_token_field: str, + expiry_field: str, + ): super().__init__() self._cmd = cmd self._token_type_field = token_type_field @@ -431,52 +481,74 @@ def refresh(self) -> Token: out = _run_subprocess(self._cmd, capture_output=True, check=True) it = json.loads(out.stdout.decode()) expires_on = self._parse_expiry(it[self._expiry_field]) - return Token(access_token=it[self._access_token_field], - token_type=it[self._token_type_field], - expiry=expires_on) + return Token( + access_token=it[self._access_token_field], + token_type=it[self._token_type_field], + expiry=expires_on, + ) except ValueError as e: raise ValueError(f"cannot unmarshal CLI result: {e}") except subprocess.CalledProcessError as e: stdout = e.stdout.decode().strip() stderr = e.stderr.decode().strip() message = stdout or stderr - raise IOError(f'cannot get access token: {message}') from e + raise IOError(f"cannot get access token: {message}") from e -def _run_subprocess(popenargs, - input=None, - capture_output=True, - timeout=None, - check=False, - **kwargs) -> subprocess.CompletedProcess: +def _run_subprocess( + popenargs, + input=None, + capture_output=True, + timeout=None, + check=False, + **kwargs, +) -> subprocess.CompletedProcess: """Runs subprocess with given arguments. - This handles OS-specific modifications that need to be made to the invocation of subprocess.run.""" - kwargs['shell'] = sys.platform.startswith('win') + This handles OS-specific modifications that need to be made to the invocation of subprocess.run. 
+ """ + kwargs["shell"] = sys.platform.startswith("win") # windows requires shell=True to be able to execute 'az login' or other commands # cannot use shell=True all the time, as it breaks macOS logging.debug(f'Running command: {" ".join(popenargs)}') - return subprocess.run(popenargs, - input=input, - capture_output=capture_output, - timeout=timeout, - check=check, - **kwargs) + return subprocess.run( + popenargs, + input=input, + capture_output=capture_output, + timeout=timeout, + check=check, + **kwargs, + ) class AzureCliTokenSource(CliTokenSource): - """ Obtain the token granted by `az login` CLI command """ - - def __init__(self, resource: str, subscription: Optional[str] = None, tenant: Optional[str] = None): - cmd = ["az", "account", "get-access-token", "--resource", resource, "--output", "json"] + """Obtain the token granted by `az login` CLI command""" + + def __init__( + self, + resource: str, + subscription: Optional[str] = None, + tenant: Optional[str] = None, + ): + cmd = [ + "az", + "account", + "get-access-token", + "--resource", + resource, + "--output", + "json", + ] if subscription is not None: cmd.append("--subscription") cmd.append(subscription) if tenant and not self.__is_cli_using_managed_identity(): cmd.extend(["--tenant", tenant]) - super().__init__(cmd=cmd, - token_type_field='tokenType', - access_token_field='accessToken', - expiry_field='expiresOn') + super().__init__( + cmd=cmd, + token_type_field="tokenType", + access_token_field="accessToken", + expiry_field="expiresOn", + ) @staticmethod def __is_cli_using_managed_identity() -> bool: @@ -489,7 +561,8 @@ def __is_cli_using_managed_identity() -> bool: if user is None: return False return user.get("type") == "servicePrincipal" and user.get("name") in [ - 'systemAssignedIdentity', 'userAssignedIdentity' + "systemAssignedIdentity", + "userAssignedIdentity", ] except subprocess.CalledProcessError as e: logger.debug("Failed to get account information from Azure CLI", exc_info=e) @@ -512,15 +585,13 @@ def is_human_user(self) -> bool: guaranteed to be unique within a tenant and should be used only for display purposes. - 'upn' - The username of the user. """ - return 'upn' in self.token().jwt_claims() + return "upn" in self.token().jwt_claims() @staticmethod - def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': + def for_resource(cfg: "Config", resource: str) -> "AzureCliTokenSource": subscription = AzureCliTokenSource.get_subscription(cfg) if subscription is not None: - token_source = AzureCliTokenSource(resource, - subscription=subscription, - tenant=cfg.azure_tenant_id) + token_source = AzureCliTokenSource(resource, subscription=subscription, tenant=cfg.azure_tenant_id) try: # This will fail if the user has access to the workspace, but not to the subscription # itself. @@ -535,32 +606,32 @@ def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': return token_source @staticmethod - def get_subscription(cfg: 'Config') -> Optional[str]: + def get_subscription(cfg: "Config") -> Optional[str]: resource = cfg.azure_workspace_resource_id if resource is None or resource == "": return None - components = resource.split('/') + components = resource.split("/") if len(components) < 3: logger.warning("Invalid azure workspace resource ID") return None return components[2] -@credentials_strategy('azure-cli', ['is_azure']) -def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: - """ Adds refreshed OAuth token granted by `az login` command to every request. 
""" +@credentials_strategy("azure-cli", ["is_azure"]) +def azure_cli(cfg: "Config") -> Optional[CredentialsProvider]: + """Adds refreshed OAuth token granted by `az login` command to every request.""" cfg.load_azure_tenant_id() token_source = None mgmt_token_source = None try: token_source = AzureCliTokenSource.for_resource(cfg, cfg.effective_azure_login_app_id) except FileNotFoundError: - doc = 'https://docs.microsoft.com/en-us/cli/azure/?view=azure-cli-latest' - logger.debug(f'Most likely Azure CLI is not installed. See {doc} for details') + doc = "https://docs.microsoft.com/en-us/cli/azure/?view=azure-cli-latest" + logger.debug(f"Most likely Azure CLI is not installed. See {doc} for details") return None except OSError as e: - logger.debug('skipping Azure CLI auth', exc_info=e) - logger.debug('This may happen if you are attempting to login to a dev or staging workspace') + logger.debug("skipping Azure CLI auth", exc_info=e) + logger.debug("This may happen if you are attempting to login to a dev or staging workspace") return None if not token_source.is_human_user(): @@ -568,7 +639,10 @@ def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: management_endpoint = cfg.arm_environment.service_management_endpoint mgmt_token_source = AzureCliTokenSource.for_resource(cfg, management_endpoint) except Exception as e: - logger.debug(f'Not including service management token in headers', exc_info=e) + logger.debug( + f"Not including service management token in headers", + exc_info=e, + ) mgmt_token_source = None _ensure_host_present(cfg, lambda resource: AzureCliTokenSource.for_resource(cfg, resource)) @@ -576,7 +650,7 @@ def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: def inner() -> Dict[str, str]: token = token_source.token() - headers = {'Authorization': f'{token.token_type} {token.access_token}'} + headers = {"Authorization": f"{token.token_type} {token.access_token}"} add_workspace_id_header(cfg, headers) if mgmt_token_source: add_sp_management_token(mgmt_token_source, headers) @@ -586,12 +660,12 @@ def inner() -> Dict[str, str]: class DatabricksCliTokenSource(CliTokenSource): - """ Obtain the token granted by `databricks auth login` CLI command """ + """Obtain the token granted by `databricks auth login` CLI command""" - def __init__(self, cfg: 'Config'): - args = ['auth', 'token', '--host', cfg.host] + def __init__(self, cfg: "Config"): + args = ["auth", "token", "--host", cfg.host] if cfg.is_account_client: - args += ['--account-id', cfg.account_id] + args += ["--account-id", cfg.account_id] cli_path = cfg.databricks_cli_path @@ -611,10 +685,12 @@ def __init__(self, cfg: 'Config'): elif cli_path.count("/") == 0: cli_path = self.__class__._find_executable(cli_path) - super().__init__(cmd=[cli_path, *args], - token_type_field='token_type', - access_token_field='access_token', - expiry_field='expiry') + super().__init__( + cmd=[cli_path, *args], + token_type_field="token_type", + access_token_field="access_token", + expiry_field="expiry", + ) @staticmethod def _find_executable(name) -> str: @@ -636,8 +712,8 @@ def _find_executable(name) -> str: raise err -@oauth_credentials_strategy('databricks-cli', ['host']) -def databricks_cli(cfg: 'Config') -> Optional[CredentialsProvider]: +@oauth_credentials_strategy("databricks-cli", ["host"]) +def databricks_cli(cfg: "Config") -> Optional[CredentialsProvider]: try: token_source = DatabricksCliTokenSource(cfg) except FileNotFoundError as e: @@ -647,8 +723,8 @@ def databricks_cli(cfg: 'Config') -> Optional[CredentialsProvider]: 
     try:
         token_source.token()
     except IOError as e:
-        if 'databricks OAuth is not' in str(e):
-            logger.debug(f'OAuth not configured or not available: {e}')
+        if "databricks OAuth is not" in str(e):
+            logger.debug(f"OAuth not configured or not available: {e}")
             return None
         raise e

@@ -656,7 +732,7 @@ def databricks_cli(cfg: 'Config') -> Optional[CredentialsProvider]:

     def inner() -> Dict[str, str]:
         token = token_source.token()
-        return {'Authorization': f'{token.token_type} {token.access_token}'}
+        return {"Authorization": f"{token.token_type} {token.access_token}"}

     def token() -> Token:
         return token_source.token()

@@ -665,13 +741,14 @@ def token() -> Token:


 class MetadataServiceTokenSource(Refreshable):
-    """ Obtain the token granted by Databricks Metadata Service """
+    """Obtain the token granted by Databricks Metadata Service"""
+
     METADATA_SERVICE_VERSION = "1"
     METADATA_SERVICE_VERSION_HEADER = "X-Databricks-Metadata-Version"
     METADATA_SERVICE_HOST_HEADER = "X-Databricks-Host"
-    _metadata_service_timeout = 10 # seconds
+    _metadata_service_timeout = 10  # seconds

-    def __init__(self, cfg: 'Config'):
+    def __init__(self, cfg: "Config"):
         super().__init__()
         self.url = cfg.metadata_service_url
         self.host = cfg.host
@@ -682,13 +759,14 @@ def refresh(self) -> Token:
             timeout=self._metadata_service_timeout,
             headers={
                 self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
-                self.METADATA_SERVICE_HOST_HEADER: self.host
+                self.METADATA_SERVICE_HOST_HEADER: self.host,
             },
             proxies={
                 # Explicitly exclude localhost from being proxied. This is necessary
                 # for Metadata URLs which typically point to localhost.
                 "no_proxy": "localhost,127.0.0.1"
-            })
+            },
+        )
         json_resp: dict[str, Union[str, float]] = resp.json()
         access_token = json_resp.get("access_token", None)
         if access_token is None:
@@ -706,9 +784,9 @@ def refresh(self) -> Token:
     return Token(access_token=access_token, token_type=token_type, expiry=expiry)


-@credentials_strategy('metadata-service', ['host', 'metadata_service_url'])
-def metadata_service(cfg: 'Config') -> Optional[CredentialsProvider]:
-    """ Adds refreshed token granted by Databricks Metadata Service to every request. """
+@credentials_strategy("metadata-service", ["host", "metadata_service_url"])
+def metadata_service(cfg: "Config") -> Optional[CredentialsProvider]:
+    """Adds refreshed token granted by Databricks Metadata Service to every request."""
     token_source = MetadataServiceTokenSource(cfg)
     token_source.token()

@@ -716,14 +794,14 @@ def metadata_service(cfg: 'Config') -> Optional[CredentialsProvider]:

     def inner() -> Dict[str, str]:
         token = token_source.token()
-        return {'Authorization': f'{token.token_type} {token.access_token}'}
+        return {"Authorization": f"{token.token_type} {token.access_token}"}

     return inner


 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
-class ModelServingAuthProvider():
+class ModelServingAuthProvider:
     USER_CREDENTIALS = "user_credentials"

     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"

@@ -731,7 +809,7 @@ class ModelServingAuthProvider():
     def __init__(self, credential_type: Optional[str]):
         self.expiry_time = -1
         self.current_token = None
-        self.refresh_duration = 300 # 300 Seconds
+        self.refresh_duration = 300  # 300 Seconds
         self.credential_type = credential_type

     def should_fetch_model_serving_environment_oauth() -> bool:
@@ -740,10 +818,14 @@ def should_fetch_model_serving_environment_oauth() -> bool:

         Additionally check if the oauth token file path exists
         """
-        is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
-                                   or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
-        return (is_in_model_serving_env == "true"
-                and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+        is_in_model_serving_env = (
+            os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
+            or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV")
+            or "false"
+        )
+        return is_in_model_serving_env == "true" and os.path.isfile(
+            ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH
+        )

     def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
         # Use Cached value if it is valid
@@ -758,8 +840,10 @@ def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
         except Exception as e:
             # sleep and retry in case of any race conditions with OAuth refreshing
             if should_retry:
-                logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
-                               exc_info=e)
+                logger.warning(
+                    "Unable to read oauth token on first attempt in Model Serving Environment",
+                    exc_info=e,
+                )
                 time.sleep(0.5)
                 return self._get_model_dependency_oauth_token(should_retry=False)
             else:
@@ -785,8 +869,7 @@ def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
             return None

         # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR
-        host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
-            "DB_MODEL_SERVING_HOST_URL")
+        host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get("DB_MODEL_SERVING_HOST_URL")

         if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
             return (host, self._get_invokers_token())
@@ -794,8 +877,7 @@ def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
             return (host, self._get_model_dependency_oauth_token())


-def model_serving_auth_visitor(cfg: 'Config',
-                               credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
+def model_serving_auth_visitor(cfg: "Config", credential_type: Optional[str] =
None) -> Optional[CredentialsProvider]: try: model_serving_auth_provider = ModelServingAuthProvider(credential_type) host, token = model_serving_auth_provider.get_databricks_host_token() @@ -806,7 +888,10 @@ def model_serving_auth_visitor(cfg: 'Config', if cfg.host is None: cfg.host = host except Exception as e: - logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e) + logger.warning( + "Unable to get auth from Databricks Model Serving Environment", + exc_info=e, + ) return None logger.info("Using Databricks Model Serving Authentication") @@ -818,8 +903,8 @@ def inner() -> Dict[str, str]: return inner -@credentials_strategy('model-serving', []) -def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: +@credentials_strategy("model-serving", []) +def model_serving_auth(cfg: "Config") -> Optional[CredentialsProvider]: if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): logger.debug("model-serving: Not in Databricks Model Serving, skipping") return None @@ -828,20 +913,30 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]: class DefaultCredentials: - """ Select the first applicable credential provider from the chain """ + """Select the first applicable credential provider from the chain""" def __init__(self) -> None: - self._auth_type = 'default' + self._auth_type = "default" self._auth_providers = [ - pat_auth, basic_auth, metadata_service, oauth_service_principal, azure_service_principal, - github_oidc_azure, azure_cli, external_browser, databricks_cli, runtime_native_auth, - google_credentials, google_id, model_serving_auth + pat_auth, + basic_auth, + metadata_service, + oauth_service_principal, + azure_service_principal, + github_oidc_azure, + azure_cli, + external_browser, + databricks_cli, + runtime_native_auth, + google_credentials, + google_id, + model_serving_auth, ] def auth_type(self) -> str: return self._auth_type - def oauth_token(self, cfg: 'Config') -> Token: + def oauth_token(self, cfg: "Config") -> Token: for provider in self._auth_providers: auth_type = provider.auth_type() if auth_type != self._auth_type: @@ -849,14 +944,14 @@ def oauth_token(self, cfg: 'Config') -> Token: continue return provider.oauth_token(cfg) - def __call__(self, cfg: 'Config') -> CredentialsProvider: + def __call__(self, cfg: "Config") -> CredentialsProvider: for provider in self._auth_providers: auth_type = provider.auth_type() if cfg.auth_type and auth_type != cfg.auth_type: # ignore other auth types if one is explicitly enforced logger.debug(f"Ignoring {auth_type} auth, because {cfg.auth_type} is preferred") continue - logger.debug(f'Attempting to configure auth: {auth_type}') + logger.debug(f"Attempting to configure auth: {auth_type}") try: header_factory = provider(cfg) if not header_factory: @@ -864,18 +959,18 @@ def __call__(self, cfg: 'Config') -> CredentialsProvider: self._auth_type = auth_type return header_factory except Exception as e: - raise ValueError(f'{auth_type}: {e}') from e + raise ValueError(f"{auth_type}: {e}") from e auth_flow_url = "https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication" raise ValueError( - f'cannot configure default credentials, please check {auth_flow_url} to configure credentials for your preferred authentication method.' + f"cannot configure default credentials, please check {auth_flow_url} to configure credentials for your preferred authentication method." 
) class ModelServingUserCredentials(CredentialsStrategy): """ - This credential strategy is designed for authenticating the Databricks SDK in the model serving environment using user-specific rights. - In the model serving environment, the strategy retrieves a downscoped user token from the thread-local variable. - In any other environments, the class defaults to the DefaultCredentialStrategy. + This credential strategy is designed for authenticating the Databricks SDK in the model serving environment using user-specific rights. + In the model serving environment, the strategy retrieves a downscoped user token from the thread-local variable. + In any other environments, the class defaults to the DefaultCredentialStrategy. To use this credential strategy, instantiate the WorkspaceClient with the ModelServingUserCredentials strategy as follows: invokers_client = WorkspaceClient(credential_strategy = ModelServingUserCredentials()) @@ -891,7 +986,7 @@ def auth_type(self): else: return self.default_credentials.auth_type() - def __call__(self, cfg: 'Config') -> CredentialsProvider: + def __call__(self, cfg: "Config") -> CredentialsProvider: if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth(): header_factory = model_serving_auth_visitor(cfg, self.credential_type) if not header_factory: diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py index 5ad9b79ad..4da8006ec 100644 --- a/databricks/sdk/data_plane.py +++ b/databricks/sdk/data_plane.py @@ -10,6 +10,7 @@ class DataPlaneDetails: """ Contains details required to query a DataPlane endpoint. """ + endpoint_url: str """URL used to query the endpoint through the DataPlane.""" token: Token @@ -18,6 +19,7 @@ class DataPlaneDetails: class DataPlaneService: """Helper class to fetch and manage DataPlane details.""" + from .service.serving import DataPlaneInfo def __init__(self): @@ -25,8 +27,13 @@ def __init__(self): self._tokens = {} self._lock = threading.Lock() - def get_data_plane_details(self, method: str, params: List[str], info_getter: Callable[[], DataPlaneInfo], - refresh: Callable[[str], Token]): + def get_data_plane_details( + self, + method: str, + params: List[str], + info_getter: Callable[[], DataPlaneInfo], + refresh: Callable[[str], Token], + ): """Get and cache information required to query a Data Plane endpoint using the provided methods. Returns a cached DataPlaneDetails if the details have already been fetched previously and are still valid. 
diff --git a/databricks/sdk/dbutils.py b/databricks/sdk/dbutils.py index 7eda543b6..af5ebb821 100644 --- a/databricks/sdk/dbutils.py +++ b/databricks/sdk/dbutils.py @@ -12,123 +12,136 @@ from .mixins import files as dbfs_ext from .service import compute, workspace -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") -class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])): +class FileInfo(namedtuple("FileInfo", ["path", "name", "size", "modificationTime"])): pass -class MountInfo(namedtuple('MountInfo', ['mountPoint', 'source', 'encryptionType'])): +class MountInfo(namedtuple("MountInfo", ["mountPoint", "source", "encryptionType"])): pass -class SecretScope(namedtuple('SecretScope', ['name'])): +class SecretScope(namedtuple("SecretScope", ["name"])): def getName(self): return self.name -class SecretMetadata(namedtuple('SecretMetadata', ['key'])): +class SecretMetadata(namedtuple("SecretMetadata", ["key"])): pass class _FsUtil: - """ Manipulates the Databricks filesystem (DBFS) """ + """Manipulates the Databricks filesystem (DBFS)""" - def __init__(self, dbfs_ext: dbfs_ext.DbfsExt, proxy_factory: Callable[[str], '_ProxyUtil']): + def __init__( + self, + dbfs_ext: dbfs_ext.DbfsExt, + proxy_factory: Callable[[str], "_ProxyUtil"], + ): self._dbfs = dbfs_ext self._proxy_factory = proxy_factory def cp(self, from_: str, to: str, recurse: bool = False) -> bool: - """Copies a file or directory, possibly across FileSystems """ + """Copies a file or directory, possibly across FileSystems""" self._dbfs.copy(from_, to, recursive=recurse) return True def head(self, file: str, maxBytes: int = 65536) -> str: - """Returns up to the first 'maxBytes' bytes of the given file as a String encoded in UTF-8 """ + """Returns up to the first 'maxBytes' bytes of the given file as a String encoded in UTF-8""" with self._dbfs.download(file) as f: - return f.read(maxBytes).decode('utf8') + return f.read(maxBytes).decode("utf8") def ls(self, dir: str) -> List[FileInfo]: - """Lists the contents of a directory """ + """Lists the contents of a directory""" return [ - FileInfo(f.path, os.path.basename(f.path), f.file_size, f.modification_time) + FileInfo( + f.path, + os.path.basename(f.path), + f.file_size, + f.modification_time, + ) for f in self._dbfs.list(dir) ] def mkdirs(self, dir: str) -> bool: - """Creates the given directory if it does not exist, also creating any necessary parent directories """ + """Creates the given directory if it does not exist, also creating any necessary parent directories""" self._dbfs.mkdirs(dir) return True def mv(self, from_: str, to: str, recurse: bool = False) -> bool: - """Moves a file or directory, possibly across FileSystems """ + """Moves a file or directory, possibly across FileSystems""" self._dbfs.move_(from_, to, recursive=recurse, overwrite=True) return True def put(self, file: str, contents: str, overwrite: bool = False) -> bool: - """Writes the given String out to a file, encoded in UTF-8 """ + """Writes the given String out to a file, encoded in UTF-8""" with self._dbfs.open(file, write=True, overwrite=overwrite) as f: - f.write(contents.encode('utf8')) + f.write(contents.encode("utf8")) return True def rm(self, dir: str, recurse: bool = False) -> bool: - """Removes a file or directory """ + """Removes a file or directory""" self._dbfs.delete(dir, recursive=recurse) return True - def mount(self, - source: str, - mount_point: str, - encryption_type: str = None, - owner: str = None, - extra_configs: Dict[str, str] 
= None) -> bool: + def mount( + self, + source: str, + mount_point: str, + encryption_type: str = None, + owner: str = None, + extra_configs: Dict[str, str] = None, + ) -> bool: """Mounts the given source directory into DBFS at the given mount point""" - fs = self._proxy_factory('fs') + fs = self._proxy_factory("fs") kwargs = {} if encryption_type: - kwargs['encryption_type'] = encryption_type + kwargs["encryption_type"] = encryption_type if owner: - kwargs['owner'] = owner + kwargs["owner"] = owner if extra_configs: - kwargs['extra_configs'] = extra_configs + kwargs["extra_configs"] = extra_configs return fs.mount(source, mount_point, **kwargs) def unmount(self, mount_point: str) -> bool: """Deletes a DBFS mount point""" - fs = self._proxy_factory('fs') + fs = self._proxy_factory("fs") return fs.unmount(mount_point) - def updateMount(self, - source: str, - mount_point: str, - encryption_type: str = None, - owner: str = None, - extra_configs: Dict[str, str] = None) -> bool: - """ Similar to mount(), but updates an existing mount point (if present) instead of creating a new one """ - fs = self._proxy_factory('fs') + def updateMount( + self, + source: str, + mount_point: str, + encryption_type: str = None, + owner: str = None, + extra_configs: Dict[str, str] = None, + ) -> bool: + """Similar to mount(), but updates an existing mount point (if present) instead of creating a new one""" + fs = self._proxy_factory("fs") kwargs = {} if encryption_type: - kwargs['encryption_type'] = encryption_type + kwargs["encryption_type"] = encryption_type if owner: - kwargs['owner'] = owner + kwargs["owner"] = owner if extra_configs: - kwargs['extra_configs'] = extra_configs + kwargs["extra_configs"] = extra_configs return fs.updateMount(source, mount_point, **kwargs) def mounts(self) -> List[MountInfo]: - """ Displays information about what is mounted within DBFS """ + """Displays information about what is mounted within DBFS""" result = [] - fs = self._proxy_factory('fs') + fs = self._proxy_factory("fs") for info in fs.mounts(): result.append(MountInfo(info[0], info[1], info[2])) return result def refreshMounts(self) -> bool: - """ Forces all machines in this cluster to refresh their mount cache, - ensuring they receive the most recent information """ - fs = self._proxy_factory('fs') + """Forces all machines in this cluster to refresh their mount cache, + ensuring they receive the most recent information""" + fs = self._proxy_factory("fs") return fs.refreshMounts() @@ -136,13 +149,13 @@ class _SecretsUtil: """Remote equivalent of secrets util""" def __init__(self, secrets_api: workspace.SecretsAPI): - self._api = secrets_api # nolint + self._api = secrets_api # nolint def getBytes(self, scope: str, key: str) -> bytes: """Gets the bytes representation of a secret value for the specified scope and key.""" - query = {'scope': scope, 'key': key} - raw = self._api._api.do('GET', '/api/2.0/secrets/get', query=query) - return base64.b64decode(raw['value']) + query = {"scope": scope, "key": key} + raw = self._api._api.do("GET", "/api/2.0/secrets/get", query=query) + return base64.b64decode(raw["value"]) def get(self, scope: str, key: str) -> str: """Gets the string representation of a secret value for the specified secrets scope and key.""" @@ -169,13 +182,19 @@ class _JobsUtil: class _TaskValuesUtil: """Remote equivalent of task values util""" - def get(self, taskKey: str, key: str, default: any = None, debugValue: any = None) -> None: + def get( + self, + taskKey: str, + key: str, + default: any = None, + 
debugValue: any = None, + ) -> None: """ Returns `debugValue` if present, throws an error otherwise as this implementation is always run outside of a job run """ if debugValue is None: raise TypeError( - 'Must pass debugValue when calling get outside of a job context. debugValue cannot be None.' + "Must pass debugValue when calling get outside of a job context. debugValue cannot be None." ) return debugValue @@ -190,7 +209,7 @@ def __init__(self) -> None: class RemoteDbUtils: - def __init__(self, config: 'Config' = None): + def __init__(self, config: "Config" = None): self._config = Config() if not config else config self._client = ApiClient(self._config) self._clusters = compute_ext.ClustersExt(self._client) @@ -211,6 +230,7 @@ def __init__(self, config: 'Config' = None): def widgets(self): if self._widgets is None: from ._widgets import widget_impl + self._widgets = widget_impl() return self._widgets @@ -219,7 +239,7 @@ def widgets(self): def _cluster_id(self) -> str: cluster_id = self._config.cluster_id if not cluster_id: - message = 'cluster_id is required in the configuration' + message = "cluster_id is required in the configuration" raise ValueError(self._config.wrap_debug_info(message)) return cluster_id @@ -230,15 +250,16 @@ def _running_command_context(self) -> compute.ContextStatusResponse: if self._ctx: return self._ctx self._clusters.ensure_cluster_is_running(self._cluster_id) - self._ctx = self._commands.create(cluster_id=self._cluster_id, - language=compute.Language.PYTHON).result() + self._ctx = self._commands.create(cluster_id=self._cluster_id, language=compute.Language.PYTHON).result() return self._ctx - def __getattr__(self, util) -> '_ProxyUtil': - return _ProxyUtil(command_execution=self._commands, - context_factory=self._running_command_context, - cluster_id=self._cluster_id, - name=util) + def __getattr__(self, util) -> "_ProxyUtil": + return _ProxyUtil( + command_execution=self._commands, + context_factory=self._running_command_context, + cluster_id=self._cluster_id, + name=util, + ) @dataclass @@ -273,8 +294,7 @@ def __init__(self, name: str): # This means, it is completely safe to override paths starting with `{util}.{attribute}.`, since none of the prefixes # are being proxied to remote dbutils currently. proxy_override_paths = { - 'notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()': - get_local_notebook_path, + "notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()": get_local_notebook_path, } @classmethod @@ -294,7 +314,8 @@ def __run_override(self, path: str) -> Optional[OverrideResult]: def __call__(self, *args, **kwds) -> Any: if len(args) != 0 or len(kwds) != 0: raise TypeError( - f"Arguments are not supported for overridden method {self._name}. Invoke as: {self._name}()") + f"Arguments are not supported for overridden method {self._name}. 
Invoke as: {self._name}()" + ) callable_path = f"{self._name}()" result = self.__run_override(callable_path) @@ -314,8 +335,14 @@ def __getattr__(self, method: str) -> Any: class _ProxyUtil: """Enables temporary workaround to call remote in-REPL dbutils without having to re-implement them""" - def __init__(self, *, command_execution: compute.CommandExecutionAPI, - context_factory: Callable[[], compute.ContextStatusResponse], cluster_id: str, name: str): + def __init__( + self, + *, + command_execution: compute.CommandExecutionAPI, + context_factory: Callable[[], compute.ContextStatusResponse], + cluster_id: str, + name: str, + ): self._commands = command_execution self._cluster_id = cluster_id self._context_factory = context_factory @@ -324,16 +351,18 @@ def __init__(self, *, command_execution: compute.CommandExecutionAPI, def __call__(self): raise NotImplementedError(f"dbutils.{self._name} is not callable") - def __getattr__(self, method: str) -> '_ProxyCall | _ProxyUtil | _OverrideProxyUtil': + def __getattr__(self, method: str) -> "_ProxyCall | _ProxyUtil | _OverrideProxyUtil": override = _OverrideProxyUtil.new(f"{self._name}.{method}") if override: return override - return _ProxyCall(command_execution=self._commands, - cluster_id=self._cluster_id, - context_factory=self._context_factory, - util=self._name, - method=method) + return _ProxyCall( + command_execution=self._commands, + cluster_id=self._cluster_id, + context_factory=self._context_factory, + util=self._name, + method=method, + ) import html @@ -342,29 +371,34 @@ def __getattr__(self, method: str) -> '_ProxyCall | _ProxyUtil | _OverrideProxyU class _ProxyCall: - def __init__(self, *, command_execution: compute.CommandExecutionAPI, - context_factory: Callable[[], compute.ContextStatusResponse], cluster_id: str, util: str, - method: str): + def __init__( + self, + *, + command_execution: compute.CommandExecutionAPI, + context_factory: Callable[[], compute.ContextStatusResponse], + cluster_id: str, + util: str, + method: str, + ): self._commands = command_execution self._cluster_id = cluster_id self._context_factory = context_factory self._util = util self._method = method - _out_re = re.compile(r'Out\[[\d\s]+]:\s') - _tag_re = re.compile(r'<[^>]*>') - _exception_re = re.compile(r'.*Exception:\s+(.*)') - _execution_error_re = re.compile( - r'ExecutionError: ([\s\S]*)\n(StatusCode=[0-9]*)\n(StatusDescription=.*)\n') - _error_message_re = re.compile(r'ErrorMessage=(.+)\n') - _ascii_escape_re = re.compile(r'(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]') + _out_re = re.compile(r"Out\[[\d\s]+]:\s") + _tag_re = re.compile(r"<[^>]*>") + _exception_re = re.compile(r".*Exception:\s+(.*)") + _execution_error_re = re.compile(r"ExecutionError: ([\s\S]*)\n(StatusCode=[0-9]*)\n(StatusDescription=.*)\n") + _error_message_re = re.compile(r"ErrorMessage=(.+)\n") + _ascii_escape_re = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]") def _is_failed(self, results: compute.Results) -> bool: return results.result_type == compute.ResultType.ERROR def _text(self, results: compute.Results) -> str: if results.result_type != compute.ResultType.TEXT: - return '' + return "" return self._out_re.sub("", str(results.data)) def _raise_if_failed(self, results: compute.Results): @@ -399,17 +433,19 @@ def _error_from_results(self, results: compute.Results): def __call__(self, *args, **kwargs): raw = json.dumps((args, kwargs)) - code = f''' + code = f""" import json (args, kwargs) = json.loads('{raw}') result = dbutils.{self._util}.{self._method}(*args, **kwargs) 
dbutils.notebook.exit(json.dumps(result)) - ''' + """ ctx = self._context_factory() - result = self._commands.execute(cluster_id=self._cluster_id, - language=compute.Language.PYTHON, - context_id=ctx.id, - command=code).result() + result = self._commands.execute( + cluster_id=self._cluster_id, + language=compute.Language.PYTHON, + context_id=ctx.id, + command=code, + ).result() if result.status == compute.CommandStatus.FINISHED: self._raise_if_failed(result.results) raw = result.results.data diff --git a/databricks/sdk/environments.py b/databricks/sdk/environments.py index 10d647708..170767a09 100644 --- a/databricks/sdk/environments.py +++ b/databricks/sdk/environments.py @@ -14,18 +14,24 @@ class AzureEnvironment: ARM_DATABRICKS_RESOURCE_ID = "2ff814a6-3304-4ab8-85cb-cd0e6f879c1d" ENVIRONMENTS = dict( - PUBLIC=AzureEnvironment(name="PUBLIC", - service_management_endpoint="https://management.core.windows.net/", - resource_manager_endpoint="https://management.azure.com/", - active_directory_endpoint="https://login.microsoftonline.com/"), - USGOVERNMENT=AzureEnvironment(name="USGOVERNMENT", - service_management_endpoint="https://management.core.usgovcloudapi.net/", - resource_manager_endpoint="https://management.usgovcloudapi.net/", - active_directory_endpoint="https://login.microsoftonline.us/"), - CHINA=AzureEnvironment(name="CHINA", - service_management_endpoint="https://management.core.chinacloudapi.cn/", - resource_manager_endpoint="https://management.chinacloudapi.cn/", - active_directory_endpoint="https://login.chinacloudapi.cn/"), + PUBLIC=AzureEnvironment( + name="PUBLIC", + service_management_endpoint="https://management.core.windows.net/", + resource_manager_endpoint="https://management.azure.com/", + active_directory_endpoint="https://login.microsoftonline.com/", + ), + USGOVERNMENT=AzureEnvironment( + name="USGOVERNMENT", + service_management_endpoint="https://management.core.usgovcloudapi.net/", + resource_manager_endpoint="https://management.usgovcloudapi.net/", + active_directory_endpoint="https://login.microsoftonline.us/", + ), + CHINA=AzureEnvironment( + name="CHINA", + service_management_endpoint="https://management.core.chinacloudapi.cn/", + resource_manager_endpoint="https://management.chinacloudapi.cn/", + active_directory_endpoint="https://login.chinacloudapi.cn/", + ), ) @@ -69,30 +75,41 @@ def azure_active_directory_endpoint(self) -> Optional[str]: ALL_ENVS = [ DatabricksEnvironment(Cloud.AWS, ".dev.databricks.com"), DatabricksEnvironment(Cloud.AWS, ".staging.cloud.databricks.com"), - DatabricksEnvironment(Cloud.AWS, ".cloud.databricks.us"), DEFAULT_ENVIRONMENT, - DatabricksEnvironment(Cloud.AZURE, - ".dev.azuredatabricks.net", - azure_application_id="62a912ac-b58e-4c1d-89ea-b2dbfc7358fc", - azure_environment=ENVIRONMENTS["PUBLIC"]), - DatabricksEnvironment(Cloud.AZURE, - ".staging.azuredatabricks.net", - azure_application_id="4a67d088-db5c-48f1-9ff2-0aace800ae68", - azure_environment=ENVIRONMENTS["PUBLIC"]), - DatabricksEnvironment(Cloud.AZURE, - ".azuredatabricks.net", - azure_application_id=ARM_DATABRICKS_RESOURCE_ID, - azure_environment=ENVIRONMENTS["PUBLIC"]), - DatabricksEnvironment(Cloud.AZURE, - ".databricks.azure.us", - azure_application_id=ARM_DATABRICKS_RESOURCE_ID, - azure_environment=ENVIRONMENTS["USGOVERNMENT"]), - DatabricksEnvironment(Cloud.AZURE, - ".databricks.azure.cn", - azure_application_id=ARM_DATABRICKS_RESOURCE_ID, - azure_environment=ENVIRONMENTS["CHINA"]), + DatabricksEnvironment(Cloud.AWS, ".cloud.databricks.us"), + DEFAULT_ENVIRONMENT, + 
DatabricksEnvironment( + Cloud.AZURE, + ".dev.azuredatabricks.net", + azure_application_id="62a912ac-b58e-4c1d-89ea-b2dbfc7358fc", + azure_environment=ENVIRONMENTS["PUBLIC"], + ), + DatabricksEnvironment( + Cloud.AZURE, + ".staging.azuredatabricks.net", + azure_application_id="4a67d088-db5c-48f1-9ff2-0aace800ae68", + azure_environment=ENVIRONMENTS["PUBLIC"], + ), + DatabricksEnvironment( + Cloud.AZURE, + ".azuredatabricks.net", + azure_application_id=ARM_DATABRICKS_RESOURCE_ID, + azure_environment=ENVIRONMENTS["PUBLIC"], + ), + DatabricksEnvironment( + Cloud.AZURE, + ".databricks.azure.us", + azure_application_id=ARM_DATABRICKS_RESOURCE_ID, + azure_environment=ENVIRONMENTS["USGOVERNMENT"], + ), + DatabricksEnvironment( + Cloud.AZURE, + ".databricks.azure.cn", + azure_application_id=ARM_DATABRICKS_RESOURCE_ID, + azure_environment=ENVIRONMENTS["CHINA"], + ), DatabricksEnvironment(Cloud.GCP, ".dev.gcp.databricks.com"), DatabricksEnvironment(Cloud.GCP, ".staging.gcp.databricks.com"), - DatabricksEnvironment(Cloud.GCP, ".gcp.databricks.com") + DatabricksEnvironment(Cloud.GCP, ".gcp.databricks.com"), ] diff --git a/databricks/sdk/errors/base.py b/databricks/sdk/errors/base.py index 973c3644e..8a105e972 100644 --- a/databricks/sdk/errors/base.py +++ b/databricks/sdk/errors/base.py @@ -8,40 +8,45 @@ class ErrorDetail: - def __init__(self, - type: str = None, - reason: str = None, - domain: str = None, - metadata: dict = None, - **kwargs): + def __init__( + self, + type: str = None, + reason: str = None, + domain: str = None, + metadata: dict = None, + **kwargs, + ): self.type = type self.reason = reason self.domain = domain self.metadata = metadata @classmethod - def from_dict(cls, d: Dict[str, any]) -> 'ErrorDetail': - if '@type' in d: - d['type'] = d['@type'] + def from_dict(cls, d: Dict[str, any]) -> "ErrorDetail": + if "@type" in d: + d["type"] = d["@type"] return cls(**d) class DatabricksError(IOError): - """ Generic error from Databricks REST API """ + """Generic error from Databricks REST API""" + # Known ErrorDetail types _error_info_type = "type.googleapis.com/google.rpc.ErrorInfo" - def __init__(self, - message: str = None, - *, - error_code: str = None, - detail: str = None, - status: str = None, - scimType: str = None, - error: str = None, - retry_after_secs: int = None, - details: List[Dict[str, any]] = None, - **kwargs): + def __init__( + self, + message: str = None, + *, + error_code: str = None, + detail: str = None, + status: str = None, + scimType: str = None, + error: str = None, + retry_after_secs: int = None, + details: List[Dict[str, any]] = None, + **kwargs, + ): """ :param message: @@ -132,8 +137,8 @@ def matches(self, response: requests.Response, raw_error: dict): return False if self.status_code_matcher and not self.status_code_matcher.match(str(response.status_code)): return False - if self.error_code_matcher and not self.error_code_matcher.match(raw_error.get('error_code', '')): + if self.error_code_matcher and not self.error_code_matcher.match(raw_error.get("error_code", "")): return False - if self.message_matcher and not self.message_matcher.match(raw_error.get('message', '')): + if self.message_matcher and not self.message_matcher.match(raw_error.get("message", "")): return False return True diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py index 5c895becc..6a760b626 100644 --- a/databricks/sdk/errors/customizer.py +++ b/databricks/sdk/errors/customizer.py @@ -24,7 +24,7 @@ def _parse_retry_after(cls, response: requests.Response) 
-> int: retry_after = response.headers.get("Retry-After") if retry_after is None: logging.debug( - f'No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + f"No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}" ) # 429 requests should include a `Retry-After` header, but if it's missing, # we default to 1 second. @@ -40,11 +40,11 @@ def _parse_retry_after(cls, response: requests.Response) -> int: return int(retry_after) except ValueError: logging.debug( - f'Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + f"Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}" ) # defaulting to 1 sleep second to make self._is_retryable() simpler return cls._DEFAULT_RETRY_AFTER_SECONDS def customize_error(self, response: requests.Response, kwargs: dict): if response.status_code in (429, 503): - kwargs['retry_after_secs'] = self._parse_retry_after(response) + kwargs["retry_after_secs"] = self._parse_retry_after(response) diff --git a/databricks/sdk/errors/deserializer.py b/databricks/sdk/errors/deserializer.py index 4da01ee68..5a6e0da09 100644 --- a/databricks/sdk/errors/deserializer.py +++ b/databricks/sdk/errors/deserializer.py @@ -20,7 +20,7 @@ class _EmptyDeserializer(_ErrorDeserializer): def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: if len(response_body) == 0: - return {'message': response.reason} + return {"message": response.reason} return None @@ -31,39 +31,45 @@ class _StandardErrorDeserializer(_ErrorDeserializer): def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: try: - payload_str = response_body.decode('utf-8') + payload_str = response_body.decode("utf-8") resp = json.loads(payload_str) except UnicodeDecodeError as e: - logging.debug('_StandardErrorParser: unable to decode response using utf-8', exc_info=e) + logging.debug( + "_StandardErrorParser: unable to decode response using utf-8", + exc_info=e, + ) return None except json.JSONDecodeError as e: - logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e) + logging.debug( + "_StandardErrorParser: unable to deserialize response as json", + exc_info=e, + ) return None if not isinstance(resp, dict): - logging.debug('_StandardErrorParser: response is valid JSON but not a dictionary') + logging.debug("_StandardErrorParser: response is valid JSON but not a dictionary") return None error_args = { - 'message': resp.get('message', 'request failed'), - 'error_code': resp.get('error_code'), - 'details': resp.get('details'), + "message": resp.get("message", "request failed"), + "error_code": resp.get("error_code"), + "details": resp.get("details"), } # Handle API 1.2-style errors - if 'error' in resp: - error_args['message'] = resp['error'] + if "error" in resp: + error_args["message"] = resp["error"] # Handle SCIM Errors - detail = resp.get('detail') - status = resp.get('status') - scim_type = resp.get('scimType') + detail = resp.get("detail") + status = resp.get("status") + scim_type = resp.get("scimType") if detail: # Handle SCIM error message details # @see https://tools.ietf.org/html/rfc7644#section-3.7.3 if detail == "null": detail = "SCIM API Internal Error" - error_args['message'] = f"{scim_type} {detail}".strip(" ") - error_args['error_code'] = f"SCIM_{status}" + 
error_args["message"] = f"{scim_type} {detail}".strip(" ") + error_args["error_code"] = f"SCIM_{status}" return error_args @@ -72,16 +78,20 @@ class _StringErrorDeserializer(_ErrorDeserializer): Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE". """ - __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)') + __STRING_ERROR_REGEX = re.compile(r"([A-Z_]+): (.*)") def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: - payload_str = response_body.decode('utf-8') + payload_str = response_body.decode("utf-8") match = self.__STRING_ERROR_REGEX.match(payload_str) if not match: - logging.debug('_StringErrorParser: unable to parse response as string') + logging.debug("_StringErrorParser: unable to parse response as string") return None error_code, message = match.groups() - return {'error_code': error_code, 'message': message, 'status': response.status_code, } + return { + "error_code": error_code, + "message": message, + "status": response.status_code, + } class _HtmlErrorDeserializer(_ErrorDeserializer): @@ -89,18 +99,21 @@ class _HtmlErrorDeserializer(_ErrorDeserializer): Parses errors from the Databricks REST API in HTML format. """ - __HTML_ERROR_REGEXES = [re.compile(r'
<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
+    __HTML_ERROR_REGEXES = [
+        re.compile(r"<pre>(.*)</pre>"),
+        re.compile(r"<title>(.*)</title>"),
+    ]
 
     def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        payload_str = response_body.decode('utf-8')
+        payload_str = response_body.decode("utf-8")
         for regex in self.__HTML_ERROR_REGEXES:
             match = regex.search(payload_str)
             if match:
                 message = match.group(1) if match.group(1) else response.reason
                 return {
-                    'status': response.status_code,
-                    'message': message,
-                    'error_code': response.reason.upper().replace(' ', '_')
+                    "status": response.status_code,
+                    "message": message,
+                    "error_code": response.reason.upper().replace(" ", "_"),
                 }
-        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
+        logging.debug("_HtmlErrorParser: no <pre> tag found in error response")
         return None
diff --git a/databricks/sdk/errors/mapper.py b/databricks/sdk/errors/mapper.py
index 282b09c76..c3bb5b54c 100644
--- a/databricks/sdk/errors/mapper.py
+++ b/databricks/sdk/errors/mapper.py
@@ -11,7 +11,7 @@ def _error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
         if override.matches(response, raw):
             return override.custom_error(**raw)
     status_code = response.status_code
-    error_code = raw.get('error_code', None)
+    error_code = raw.get("error_code", None)
     if error_code in platform.ERROR_CODE_MAPPING:
         # more specific error codes override more generic HTTP status codes
         return platform.ERROR_CODE_MAPPING[error_code](**raw)
diff --git a/databricks/sdk/errors/overrides.py b/databricks/sdk/errors/overrides.py
index 840bdcfcb..08311fa9f 100644
--- a/databricks/sdk/errors/overrides.py
+++ b/databricks/sdk/errors/overrides.py
@@ -6,28 +6,31 @@
 from .platform import ResourceDoesNotExist
 
 _ALL_OVERRIDES = [
-    _ErrorOverride(debug_name="Clusters InvalidParameterValue=>ResourceDoesNotExist",
-                   path_regex=re.compile(r'^/api/2\.\d/clusters/get'),
-                   verb="GET",
-                   status_code_matcher=re.compile(r'^400$'),
-                   error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'),
-                   message_matcher=re.compile(r'Cluster .* does not exist'),
-                   custom_error=ResourceDoesNotExist,
-                   ),
-    _ErrorOverride(debug_name="Jobs InvalidParameterValue=>ResourceDoesNotExist",
-                   path_regex=re.compile(r'^/api/2\.\d/jobs/get'),
-                   verb="GET",
-                   status_code_matcher=re.compile(r'^400$'),
-                   error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'),
-                   message_matcher=re.compile(r'Job .* does not exist'),
-                   custom_error=ResourceDoesNotExist,
-                   ),
-    _ErrorOverride(debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist",
-                   path_regex=re.compile(r'^/api/2\.\d/jobs/runs/get'),
-                   verb="GET",
-                   status_code_matcher=re.compile(r'^400$'),
-                   error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'),
-                   message_matcher=re.compile(r'(Run .* does not exist|Run: .* in job: .* doesn\'t exist)'),
-                   custom_error=ResourceDoesNotExist,
-                   ),
+    _ErrorOverride(
+        debug_name="Clusters InvalidParameterValue=>ResourceDoesNotExist",
+        path_regex=re.compile(r"^/api/2\.\d/clusters/get"),
+        verb="GET",
+        status_code_matcher=re.compile(r"^400$"),
+        error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"),
+        message_matcher=re.compile(r"Cluster .* does not exist"),
+        custom_error=ResourceDoesNotExist,
+    ),
+    _ErrorOverride(
+        debug_name="Jobs InvalidParameterValue=>ResourceDoesNotExist",
+        path_regex=re.compile(r"^/api/2\.\d/jobs/get"),
+        verb="GET",
+        status_code_matcher=re.compile(r"^400$"),
+        error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"),
+        message_matcher=re.compile(r"Job .* does not exist"),
+        custom_error=ResourceDoesNotExist,
+    ),
+    _ErrorOverride(
+        debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist",
+        path_regex=re.compile(r"^/api/2\.\d/jobs/runs/get"),
+        verb="GET",
+        status_code_matcher=re.compile(r"^400$"),
+        error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"),
+        message_matcher=re.compile(r"(Run .* does not exist|Run: .* in job: .* doesn\'t exist)"),
+        custom_error=ResourceDoesNotExist,
+    ),
 ]
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
index 3408964fe..5861df770 100644
--- a/databricks/sdk/errors/parser.py
+++ b/databricks/sdk/errors/parser.py
@@ -26,7 +26,9 @@
 # A list of _ErrorCustomizers that are applied to the error arguments after they are parsed. Customizers can modify the
 # error arguments in any way, including adding or removing fields. Customizers are applied in order, so later
 # customizers can override the changes made by earlier customizers.
-_error_customizers = [_RetryAfterCustomizer(), ]
+_error_customizers = [
+    _RetryAfterCustomizer(),
+]
 
 
 def _unknown_error(response: requests.Response) -> str:
@@ -36,9 +38,10 @@ def _unknown_error(response: requests.Response) -> str:
     """
     request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
     return (
-        'This is likely a bug in the Databricks SDK for Python or the underlying '
-        'API. Please report this issue with the following debugging information to the SDK issue tracker at '
-        f'https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```')
+        "This is likely a bug in the Databricks SDK for Python or the underlying "
+        "API. Please report this issue with the following debugging information to the SDK issue tracker at "
+        f"https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```"
+    )
 
 
 class _Parser:
@@ -49,13 +52,15 @@ class _Parser:
     issue tracker.
     """
 
-    def __init__(self,
-                 extra_error_parsers: List[_ErrorDeserializer] = [],
-                 extra_error_customizers: List[_ErrorCustomizer] = []):
-        self._error_parsers = _error_deserializers + (extra_error_parsers
-                                                      if extra_error_parsers is not None else [])
-        self._error_customizers = _error_customizers + (extra_error_customizers
-                                                        if extra_error_customizers is not None else [])
+    def __init__(
+        self,
+        extra_error_parsers: List[_ErrorDeserializer] = [],
+        extra_error_customizers: List[_ErrorCustomizer] = [],
+    ):
+        self._error_parsers = _error_deserializers + (extra_error_parsers if extra_error_parsers is not None else [])
+        self._error_customizers = _error_customizers + (
+            extra_error_customizers if extra_error_customizers is not None else []
+        )
 
     def get_api_error(self, response: requests.Response) -> Optional[DatabricksError]:
         """
@@ -73,9 +78,14 @@ def get_api_error(self, response: requests.Response) -> Optional[DatabricksError
                             customizer.customize_error(response, error_args)
                         return _error_mapper(response, error_args)
                 except Exception as e:
-                    logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
-            return _error_mapper(response,
-                                 {'message': 'unable to parse response. ' + _unknown_error(response)})
+                    logging.debug(
+                        f"Error parsing response with {parser}, continuing",
+                        exc_info=e,
+                    )
+            return _error_mapper(
+                response,
+                {"message": "unable to parse response. " + _unknown_error(response)},
+            )
 
         # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
         # is successful, but the response is not what we expect. We need to handle this case separately.
diff --git a/databricks/sdk/errors/platform.py b/databricks/sdk/errors/platform.py
index 0d923a75c..a5b3cb111 100755
--- a/databricks/sdk/errors/platform.py
+++ b/databricks/sdk/errors/platform.py
@@ -103,14 +103,14 @@ class DataLoss(InternalError):
 }
 
 ERROR_CODE_MAPPING = {
-    'INVALID_STATE': InvalidState,
-    'INVALID_PARAMETER_VALUE': InvalidParameterValue,
-    'RESOURCE_DOES_NOT_EXIST': ResourceDoesNotExist,
-    'ABORTED': Aborted,
-    'ALREADY_EXISTS': AlreadyExists,
-    'RESOURCE_ALREADY_EXISTS': ResourceAlreadyExists,
-    'RESOURCE_EXHAUSTED': ResourceExhausted,
-    'REQUEST_LIMIT_EXCEEDED': RequestLimitExceeded,
-    'UNKNOWN': Unknown,
-    'DATA_LOSS': DataLoss,
+    "INVALID_STATE": InvalidState,
+    "INVALID_PARAMETER_VALUE": InvalidParameterValue,
+    "RESOURCE_DOES_NOT_EXIST": ResourceDoesNotExist,
+    "ABORTED": Aborted,
+    "ALREADY_EXISTS": AlreadyExists,
+    "RESOURCE_ALREADY_EXISTS": ResourceAlreadyExists,
+    "RESOURCE_EXHAUSTED": ResourceExhausted,
+    "REQUEST_LIMIT_EXCEEDED": RequestLimitExceeded,
+    "UNKNOWN": Unknown,
+    "DATA_LOSS": DataLoss,
 }
diff --git a/databricks/sdk/errors/private_link.py b/databricks/sdk/errors/private_link.py
index 946b41b50..e188b59e1 100644
--- a/databricks/sdk/errors/private_link.py
+++ b/databricks/sdk/errors/private_link.py
@@ -15,29 +15,28 @@ class _PrivateLinkInfo:
 
     def error_message(self):
         return (
-            f'The requested workspace has {self.serviceName} enabled and is not accessible from the current network. '
-            f'Ensure that {self.serviceName} is properly configured and that your device has access to the '
-            f'{self.endpointName}. For more information, see {self.referencePage}.')
+            f"The requested workspace has {self.serviceName} enabled and is not accessible from the current network. "
+            f"Ensure that {self.serviceName} is properly configured and that your device has access to the "
+            f"{self.endpointName}. For more information, see {self.referencePage}."
+        )
 
 
 _private_link_info_map = {
-    Cloud.AWS:
-    _PrivateLinkInfo(serviceName='AWS PrivateLink',
-                     endpointName='AWS VPC endpoint',
-                     referencePage='https://docs.databricks.com/en/security/network/classic/privatelink.html',
-                     ),
-    Cloud.AZURE:
-    _PrivateLinkInfo(
-        serviceName='Azure Private Link',
-        endpointName='Azure Private Link endpoint',
-        referencePage='https://learn.microsoft.com/en-us/azure/databricks/security/network/classic/private-link-standard#authentication-troubleshooting',
+    Cloud.AWS: _PrivateLinkInfo(
+        serviceName="AWS PrivateLink",
+        endpointName="AWS VPC endpoint",
+        referencePage="https://docs.databricks.com/en/security/network/classic/privatelink.html",
+    ),
+    Cloud.AZURE: _PrivateLinkInfo(
+        serviceName="Azure Private Link",
+        endpointName="Azure Private Link endpoint",
+        referencePage="https://learn.microsoft.com/en-us/azure/databricks/security/network/classic/private-link-standard#authentication-troubleshooting",
+    ),
+    Cloud.GCP: _PrivateLinkInfo(
+        serviceName="Private Service Connect",
+        endpointName="GCP VPC endpoint",
+        referencePage="https://docs.gcp.databricks.com/en/security/network/classic/private-service-connect.html",
     ),
-    Cloud.GCP:
-    _PrivateLinkInfo(
-        serviceName='Private Service Connect',
-        endpointName='GCP VPC endpoint',
-        referencePage='https://docs.gcp.databricks.com/en/security/network/classic/private-service-connect.html',
-    )
 }
 
 
@@ -48,13 +47,14 @@ class PrivateLinkValidationError(PermissionDenied):
 
 def _is_private_link_redirect(resp: requests.Response) -> bool:
     parsed = parse.urlparse(resp.url)
-    return parsed.path == '/login.html' and 'error=private-link-validation-error' in parsed.query
+    return parsed.path == "/login.html" and "error=private-link-validation-error" in parsed.query
 
 
 def _get_private_link_validation_error(url: str) -> PrivateLinkValidationError:
     parsed = parse.urlparse(url)
     env = get_environment_for_hostname(parsed.hostname)
-    return PrivateLinkValidationError(message=_private_link_info_map[env.cloud].error_message(),
-                                      error_code='PRIVATE_LINK_VALIDATION_ERROR',
-                                      status_code=403,
-                                      )
+    return PrivateLinkValidationError(
+        message=_private_link_info_map[env.cloud].error_message(),
+        error_code="PRIVATE_LINK_VALIDATION_ERROR",
+        status_code=403,
+    )
diff --git a/databricks/sdk/logger/round_trip_logger.py b/databricks/sdk/logger/round_trip_logger.py
index 1c0a47f08..e6ac5e80b 100644
--- a/databricks/sdk/logger/round_trip_logger.py
+++ b/databricks/sdk/logger/round_trip_logger.py
@@ -15,11 +15,13 @@ class RoundTrip:
     :param raw: Whether the response is a stream or not. If True, the response will not be logged directly.
     """
 
-    def __init__(self,
-                 response: requests.Response,
-                 debug_headers: bool,
-                 debug_truncate_bytes: int,
-                 raw=False):
+    def __init__(
+        self,
+        response: requests.Response,
+        debug_headers: bool,
+        debug_truncate_bytes: int,
+        raw=False,
+    ):
         self._debug_headers = debug_headers
         self._debug_truncate_bytes = max(debug_truncate_bytes, 96)
         self._raw = raw
@@ -34,28 +36,34 @@ def generate(self) -> str:
         """
         request = self._response.request
         url = urllib.parse.urlparse(request.url)
-        query = ''
+        query = ""
         if url.query:
-            query = f'?{urllib.parse.unquote(url.query)}'
-        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
+            query = f"?{urllib.parse.unquote(url.query)}"
+        sb = [f"{request.method} {urllib.parse.unquote(url.path)}{query}"]
         if self._debug_headers:
             for k, v in request.headers.items():
-                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
+                sb.append(f"> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}")
         if request.body:
             sb.append("> [raw stream]" if self._raw else self._redacted_dump("> ", request.body))
-        sb.append(f'< {self._response.status_code} {self._response.reason}')
-        if self._raw and self._response.headers.get('Content-Type', None) != 'application/json':
+        sb.append(f"< {self._response.status_code} {self._response.reason}")
+        if self._raw and self._response.headers.get("Content-Type", None) != "application/json":
             # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
             sb.append("< [raw stream]")
         elif self._response.content:
-            decoded = self._response.content.decode('utf-8', errors='replace')
+            decoded = self._response.content.decode("utf-8", errors="replace")
             sb.append(self._redacted_dump("< ", decoded))
-        return '\n'.join(sb)
+        return "\n".join(sb)
 
     @staticmethod
     def _mask(m: Dict[str, any]):
         for k in m:
-            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
+            if k in {
+                "bytes_value",
+                "string_value",
+                "token_value",
+                "value",
+                "content",
+            }:
                 m[k] = "**REDACTED**"
 
     @staticmethod
@@ -66,7 +74,7 @@ def _map_keys(m: Dict[str, any]) -> List[str]:
 
     @staticmethod
     def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
-        diff = len(j.encode('utf-8')) - num_bytes
+        diff = len(j.encode("utf-8")) - num_bytes
         if diff > 0:
             return f"{j[:num_bytes]}... ({diff} more bytes)"
         return j
@@ -112,8 +120,8 @@ def _redacted_dump(self, prefix: str, body: str) -> str:
                 max_bytes = self._debug_truncate_bytes
             # Re-marshal body taking redaction and character limit into account.
             raw = self._recursive_marshal(tmp, max_bytes)
-            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
+            return "\n".join([f"{prefix}{line}" for line in json.dumps(raw, indent=2).split("\n")])
         except json.JSONDecodeError:
             to_log = self._only_n_bytes(body, self._debug_truncate_bytes)
-            log_lines = [prefix + x.strip('\r') for x in to_log.split("\n")]
-            return '\n'.join(log_lines)
+            log_lines = [prefix + x.strip("\r") for x in to_log.split("\n")]
+            return "\n".join(log_lines)
diff --git a/databricks/sdk/mixins/compute.py b/databricks/sdk/mixins/compute.py
index f681b2d74..164887fb3 100644
--- a/databricks/sdk/mixins/compute.py
+++ b/databricks/sdk/mixins/compute.py
@@ -9,7 +9,7 @@
 from databricks.sdk.errors import OperationFailed
 from databricks.sdk.service import compute
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 
 @dataclass
@@ -22,35 +22,39 @@ class SemVer:
 
     # official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
     # with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
-    _pattern = re.compile(r"^"
-                          r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
-                          r"(?:-(?P<pre_release>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
-                          r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
-                          r"(?:\+(?P<build>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
+    _pattern = re.compile(
+        r"^"
+        r"(?P0|[1-9]\d*)\.(?Px|0|[1-9]\d*)(\.(?Px|0|[1-9x]\d*))?"
+        r"(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+        r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+        r"(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+    )
 
     @classmethod
-    def parse(cls, v: str) -> 'SemVer':
+    def parse(cls, v: str) -> "SemVer":
         if not v:
-            raise ValueError(f'Not a valid SemVer: {v}')
-        if v[0] != 'v':
-            v = f'v{v}'
+            raise ValueError(f"Not a valid SemVer: {v}")
+        if v[0] != "v":
+            v = f"v{v}"
         m = cls._pattern.match(v[1:])
         if not m:
-            raise ValueError(f'Not a valid SemVer: {v}')
+            raise ValueError(f"Not a valid SemVer: {v}")
         # patch and/or minor versions may be wildcards.
         # for now, we're converting wildcards to zeroes.
-        minor = m.group('minor')
+        minor = m.group("minor")
         try:
-            patch = m.group('patch')
+            patch = m.group("patch")
         except IndexError:
             patch = 0
-        return SemVer(major=int(m.group('major')),
-                      minor=0 if minor == 'x' else int(minor),
-                      patch=0 if patch == 'x' or patch is None else int(patch),
-                      pre_release=m.group('pre_release'),
-                      build=m.group('build'))
+        return SemVer(
+            major=int(m.group("major")),
+            minor=0 if minor == "x" else int(minor),
+            patch=0 if patch == "x" or patch is None else int(patch),
+            pre_release=m.group("pre_release"),
+            build=m.group("build"),
+        )
 
-    def __lt__(self, other: 'SemVer'):
+    def __lt__(self, other: "SemVer"):
         if not other:
             return False
         if self.major != other.major:
@@ -69,17 +73,19 @@ def __lt__(self, other: 'SemVer'):
 class ClustersExt(compute.ClustersAPI):
     __doc__ = compute.ClustersAPI.__doc__
 
-    def select_spark_version(self,
-                             long_term_support: bool = False,
-                             beta: bool = False,
-                             latest: bool = True,
-                             ml: bool = False,
-                             genomics: bool = False,
-                             gpu: bool = False,
-                             scala: str = "2.12",
-                             spark_version: str = None,
-                             photon: bool = False,
-                             graviton: bool = False) -> str:
+    def select_spark_version(
+        self,
+        long_term_support: bool = False,
+        beta: bool = False,
+        latest: bool = True,
+        ml: bool = False,
+        genomics: bool = False,
+        gpu: bool = False,
+        scala: str = "2.12",
+        spark_version: str = None,
+        photon: bool = False,
+        graviton: bool = False,
+    ) -> str:
         """Selects the latest Databricks Runtime Version.
 
         :param long_term_support: bool
@@ -101,10 +107,15 @@ def select_spark_version(self,
         for version in sv.versions:
             if "-scala" + scala not in version.key:
                 continue
-            matches = (("apache-spark-" not in version.key) and (("-ml-" in version.key) == ml)
-                       and (("-hls-" in version.key) == genomics) and (("-gpu-" in version.key) == gpu)
-                       and (("-photon-" in version.key) == photon)
-                       and (("-aarch64-" in version.key) == graviton) and (("Beta" in version.name) == beta))
+            matches = (
+                ("apache-spark-" not in version.key)
+                and (("-ml-" in version.key) == ml)
+                and (("-hls-" in version.key) == genomics)
+                and (("-gpu-" in version.key) == gpu)
+                and (("-photon-" in version.key) == photon)
+                and (("-aarch64-" in version.key) == graviton)
+                and (("Beta" in version.name) == beta)
+            )
             if matches and long_term_support:
                 matches = matches and (("LTS" in version.name) or ("-esr-" in version.key))
             if matches and spark_version:
@@ -127,8 +138,17 @@ def _node_sorting_tuple(item: compute.NodeType) -> tuple:
             local_nvme_disk = item.node_instance_type.local_nvme_disks
             local_disk_size_gb = item.node_instance_type.local_disk_size_gb
             local_nvme_disk_size_gb = item.node_instance_type.local_nvme_disk_size_gb
-        return (item.is_deprecated, item.num_cores, item.memory_mb, local_disks, local_disk_size_gb,
-                local_nvme_disk, local_nvme_disk_size_gb, item.num_gpus, item.instance_type_id)
+        return (
+            item.is_deprecated,
+            item.num_cores,
+            item.memory_mb,
+            local_disks,
+            local_disk_size_gb,
+            local_nvme_disk,
+            local_nvme_disk_size_gb,
+            item.num_gpus,
+            item.instance_type_id,
+        )
 
     @staticmethod
     def _should_node_be_skipped(nt: compute.NodeType) -> bool:
@@ -138,24 +158,29 @@ def _should_node_be_skipped(nt: compute.NodeType) -> bool:
             return False
         val = compute.CloudProviderNodeStatus
         for st in nt.node_info.status:
-            if st in (val.NOT_AVAILABLE_IN_REGION, val.NOT_ENABLED_ON_SUBSCRIPTION):
+            if st in (
+                val.NOT_AVAILABLE_IN_REGION,
+                val.NOT_ENABLED_ON_SUBSCRIPTION,
+            ):
                 return True
         return False
 
-    def select_node_type(self,
-                         min_memory_gb: int = None,
-                         gb_per_core: int = None,
-                         min_cores: int = None,
-                         min_gpus: int = None,
-                         local_disk: bool = None,
-                         local_disk_min_size: int = None,
-                         category: str = None,
-                         photon_worker_capable: bool = None,
-                         photon_driver_capable: bool = None,
-                         graviton: bool = None,
-                         is_io_cache_enabled: bool = None,
-                         support_port_forwarding: bool = None,
-                         fleet: str = None) -> str:
+    def select_node_type(
+        self,
+        min_memory_gb: int = None,
+        gb_per_core: int = None,
+        min_cores: int = None,
+        min_gpus: int = None,
+        local_disk: bool = None,
+        local_disk_min_size: int = None,
+        category: str = None,
+        photon_worker_capable: bool = None,
+        photon_driver_capable: bool = None,
+        graviton: bool = None,
+        is_io_cache_enabled: bool = None,
+        support_port_forwarding: bool = None,
+        fleet: str = None,
+    ) -> str:
         """Selects smallest available node type given the conditions.
 
         :param min_memory_gb: int
@@ -194,12 +219,13 @@ def select_node_type(self,
             if local_disk or local_disk_min_size is not None:
                 instance_type = nt.node_instance_type
                 local_disks = int(instance_type.local_disks) if instance_type.local_disks else 0
-                local_nvme_disks = int(
-                    instance_type.local_nvme_disks) if instance_type.local_nvme_disks else 0
+                local_nvme_disks = int(instance_type.local_nvme_disks) if instance_type.local_nvme_disks else 0
                 if instance_type is None or (local_disks < 1 and local_nvme_disks < 1):
                     continue
                 local_disk_size_gb = instance_type.local_disk_size_gb if instance_type.local_disk_size_gb else 0
-                local_nvme_disk_size_gb = instance_type.local_nvme_disk_size_gb if instance_type.local_nvme_disk_size_gb else 0
+                local_nvme_disk_size_gb = (
+                    instance_type.local_nvme_disk_size_gb if instance_type.local_nvme_disk_size_gb else 0
+                )
                 all_disks_size = local_disk_size_gb + local_nvme_disk_size_gb
                 if local_disk_min_size is not None and all_disks_size < local_disk_min_size:
                     continue
@@ -235,16 +261,20 @@ def ensure_cluster_is_running(self, cluster_id: str) -> None:
                     self.wait_get_cluster_terminated(cluster_id)
                     self.start(cluster_id).result()
                     return
-                elif info.state in (state.PENDING, state.RESIZING, state.RESTARTING):
+                elif info.state in (
+                    state.PENDING,
+                    state.RESIZING,
+                    state.RESTARTING,
+                ):
                     self.wait_get_cluster_running(cluster_id)
                     return
                 elif info.state in (state.ERROR, state.UNKNOWN):
-                    raise RuntimeError(f'Cluster {info.cluster_name} is {info.state}: {info.state_message}')
+                    raise RuntimeError(f"Cluster {info.cluster_name} is {info.state}: {info.state_message}")
             except DatabricksError as e:
-                if e.error_code == 'INVALID_STATE':
-                    _LOG.debug(f'Cluster was started by other process: {e} Retrying.')
+                if e.error_code == "INVALID_STATE":
+                    _LOG.debug(f"Cluster was started by other process: {e} Retrying.")
                     continue
                 raise e
             except OperationFailed as e:
-                _LOG.debug('Operation failed, retrying', exc_info=e)
-        raise TimeoutError(f'timed out after {timeout}')
+                _LOG.debug("Operation failed, retrying", exc_info=e)
+        raise TimeoutError(f"timed out after {timeout}")
diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 678b4b630..6a9b263bd 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -39,19 +39,25 @@ class _DbfsIO(BinaryIO):
     _offset = 0
     _closed = False
 
-    def __init__(self,
-                 api: files.DbfsAPI,
-                 path: str,
-                 *,
-                 read: bool = False,
-                 write: bool = False,
-                 overwrite: bool = False):
+    def __init__(
+        self,
+        api: files.DbfsAPI,
+        path: str,
+        *,
+        read: bool = False,
+        write: bool = False,
+        overwrite: bool = False,
+    ):
         self._api = api
         self._path = path
-        if write and read: raise IOError(f'can open either for reading or writing')
-        if read: self._status = api.get_status(path)
-        elif write: self._created = api.create(path, overwrite=overwrite)
-        else: raise IOError(f'need to open either for reading or writing')
+        if write and read:
+            raise IOError(f"can open either for reading or writing")
+        if read:
+            self._status = api.get_status(path)
+        elif write:
+            self._created = api.create(path, overwrite=overwrite)
+        else:
+            raise IOError(f"need to open either for reading or writing")
 
     def __enter__(self) -> Self:
         return self
@@ -69,54 +75,59 @@ def writable(self) -> bool:
         return self._created is not None
 
     def write(self, buffer: bytes) -> int:
-        """ Write bytes to file.
+        """Write bytes to file.
 
         :return: Return the number of bytes written.
         """
         if not self.writable():
-            raise IOError('file not open for writing')
+            raise IOError("file not open for writing")
         if type(buffer) is not bytes:
             # Python doesn't strictly enforce types, even when they're specified.
-            raise TypeError(f'a bytes-like object is required, not {type(buffer)}')
+            raise TypeError(f"a bytes-like object is required, not {type(buffer)}")
         total = 0
         while total < len(buffer):
             chunk = buffer[total:]
             if len(chunk) > self.MAX_CHUNK_SIZE:
-                chunk = chunk[:self.MAX_CHUNK_SIZE]
+                chunk = chunk[: self.MAX_CHUNK_SIZE]
             encoded = base64.b64encode(chunk).decode()
             self._api.add_block(self._created.handle, encoded)
             total += len(chunk)
         return total
 
     def close(self) -> None:
-        """ Disable all I/O operations. """
-        if self.writable(): self._api.close(self._created.handle)
+        """Disable all I/O operations."""
+        if self.writable():
+            self._api.close(self._created.handle)
         self._closed = True
 
     @property
     def closed(self) -> bool:
         return self._closed
 
-    def __exit__(self, __t: Type[BaseException] | None, __value: BaseException | None,
-                 __traceback: TracebackType | None):
+    def __exit__(
+        self,
+        __t: Type[BaseException] | None,
+        __value: BaseException | None,
+        __traceback: TracebackType | None,
+    ):
         self.close()
 
     def readable(self) -> bool:
         return self._status is not None
 
     def read(self, size: int = ...) -> bytes:
-        """ Read at most size bytes, returned as a bytes object.
+        """Read at most size bytes, returned as a bytes object.
 
         :param size: If the size argument is negative, read until EOF is reached.
                      Return an empty bytes object at EOF.
         :return: bytes
         """
         if not self.readable():
-            raise IOError('file not open for reading')
+            raise IOError("file not open for reading")
 
         # call __iter__() and read until EOF is reached
         if size is ... or size < 0:
-            buffer = b''
+            buffer = b""
             for chunk in self:
                 buffer += chunk
             return buffer
@@ -128,7 +139,7 @@ def read(self, size: int = ...) -> bytes:
         if response.bytes_read == 0:
             # as per Python interface convention, return an empty bytes object at EOF,
             # and not the EOFError as in other SDKs
-            return b''
+            return b""
 
         raw = base64.b64decode(response.data)
         self._offset += response.bytes_read
@@ -178,7 +189,15 @@ def __repr__(self) -> str:
 
 class _VolumesIO(BinaryIO):
 
-    def __init__(self, api: files.FilesAPI, path: str, *, read: bool, write: bool, overwrite: bool):
+    def __init__(
+        self,
+        api: files.FilesAPI,
+        path: str,
+        *,
+        read: bool,
+        write: bool,
+        overwrite: bool,
+    ):
         self._buffer = []
         self._api = api
         self._path = path
@@ -198,8 +217,12 @@ def close(self):
         if self._closed:
             return
         if self._write:
-            to_write = b''.join(self._buffer)
-            self._api.upload(self._path, contents=BytesIO(to_write), overwrite=self._overwrite)
+            to_write = b"".join(self._buffer)
+            self._api.upload(
+                self._path,
+                contents=BytesIO(to_write),
+                overwrite=self._overwrite,
+            )
         elif self._read:
             self._read_handle.close()
         self._closed = True
@@ -215,7 +238,7 @@ def isatty(self) -> bool:
 
     def __check_closed(self):
         if self._closed:
-            raise ValueError('I/O operation on closed file')
+            raise ValueError("I/O operation on closed file")
 
     def __open_read(self):
         if self._read_handle is None:
@@ -277,55 +300,45 @@ def __repr__(self) -> str:
 class _Path(ABC):
 
     @abstractmethod
-    def __init__(self):
-        ...
+    def __init__(self): ...
 
     @property
     def is_local(self) -> bool:
         return self._is_local()
 
     @abstractmethod
-    def _is_local(self) -> bool:
-        ...
+    def _is_local(self) -> bool: ...
 
     @property
     def is_dbfs(self) -> bool:
         return self._is_dbfs()
 
     @abstractmethod
-    def _is_dbfs(self) -> bool:
-        ...
+    def _is_dbfs(self) -> bool: ...
 
     @abstractmethod
-    def child(self, path: str) -> str:
-        ...
+    def child(self, path: str) -> str: ...
 
     @_cached_property
     def is_dir(self) -> bool:
         return self._is_dir()
 
     @abstractmethod
-    def _is_dir(self) -> bool:
-        ...
+    def _is_dir(self) -> bool: ...
 
     @abstractmethod
-    def exists(self) -> bool:
-        ...
+    def exists(self) -> bool: ...
 
     @abstractmethod
-    def open(self, *, read=False, write=False, overwrite=False):
-        ...
+    def open(self, *, read=False, write=False, overwrite=False): ...
 
-    def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
-        ...
+    def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]: ...
 
     @abstractmethod
-    def mkdir(self):
-        ...
+    def mkdir(self): ...
 
     @abstractmethod
-    def delete(self, *, recursive=False):
-        ...
+    def delete(self, *, recursive=False): ...
 
     @property
     def name(self) -> str:
@@ -340,9 +353,9 @@ class _LocalPath(_Path):
 
     def __init__(self, path: str):
         if platform.system() == "Windows":
-            self._path = pathlib.Path(str(path).replace('file:///', '').replace('file:', ''))
+            self._path = pathlib.Path(str(path).replace("file:///", "").replace("file:", ""))
         else:
-            self._path = pathlib.Path(str(path).replace('file:', ''))
+            self._path = pathlib.Path(str(path).replace("file:", ""))
 
     def _is_local(self) -> bool:
         return True
@@ -365,16 +378,17 @@ def exists(self) -> bool:
     def open(self, *, read=False, write=False, overwrite=False):
         # make the local FS follow similar semantics to DBFS
         self._path.parent.mkdir(mode=0o755, parents=True, exist_ok=True)
-        return self._path.open(mode='wb' if overwrite else 'rb' if read else 'xb')
+        return self._path.open(mode="wb" if overwrite else "rb" if read else "xb")
 
     def list(self, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:
             st = self._path.stat()
-            yield files.FileInfo(path='file:' + str(self._path.absolute()),
-                                 is_dir=False,
-                                 file_size=st.st_size,
-                                 modification_time=int(st.st_mtime_ns / 1e6),
-                                 )
+            yield files.FileInfo(
+                path="file:" + str(self._path.absolute()),
+                is_dir=False,
+                file_size=st.st_size,
+                modification_time=int(st.st_mtime_ns / 1e6),
+            )
             return
         queue = deque([self._path])
         while queue:
@@ -385,11 +399,12 @@ def list(self, recursive=False) -> Generator[files.FileInfo, None, None]:
                         queue.append(leaf)
                     continue
                 info = leaf.stat()
-                yield files.FileInfo(path='file:' + str(leaf.absolute()),
-                                     is_dir=False,
-                                     file_size=info.st_size,
-                                     modification_time=int(info.st_mtime_ns / 1e6),
-                                     )
+                yield files.FileInfo(
+                    path="file:" + str(leaf.absolute()),
+                    is_dir=False,
+                    file_size=info.st_size,
+                    modification_time=int(info.st_mtime_ns / 1e6),
+                )
 
     def delete(self, *, recursive=False):
         if self.is_dir:
@@ -400,17 +415,17 @@ def delete(self, *, recursive=False):
         else:
             kw = {}
             if sys.version_info[:2] > (3, 7):
-                kw['missing_ok'] = True
+                kw["missing_ok"] = True
             self._path.unlink(**kw)
 
     def __repr__(self) -> str:
-        return f'<_LocalPath {self._path}>'
+        return f"<_LocalPath {self._path}>"
 
 
 class _VolumesPath(_Path):
 
     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
-        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
+        self._path = pathlib.PurePosixPath(str(src).replace("dbfs:", "").replace("file:", ""))
         self._api = api
 
     def _is_local(self) -> bool:
@@ -440,16 +455,23 @@ def exists(self) -> bool:
             return self.is_dir
 
     def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO:
-        return _VolumesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
+        return _VolumesIO(
+            self._api,
+            self.as_string,
+            read=read,
+            write=write,
+            overwrite=overwrite,
+        )
 
     def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:
             meta = self._api.get_metadata(self.as_string)
-            yield files.FileInfo(path=self.as_string,
-                                 is_dir=False,
-                                 file_size=meta.content_length,
-                                 modification_time=meta.last_modified,
-                                 )
+            yield files.FileInfo(
+                path=self.as_string,
+                is_dir=False,
+                file_size=meta.content_length,
+                modification_time=meta.last_modified,
+            )
             return
         queue = deque([self])
         while queue:
@@ -458,11 +480,12 @@ def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
                 if recursive and file.is_directory:
                     queue.append(self.child(file.name))
                 if not recursive or not file.is_directory:
-                    yield files.FileInfo(path=file.path,
-                                         is_dir=file.is_directory,
-                                         file_size=file.file_size,
-                                         modification_time=file.last_modified,
-                                         )
+                    yield files.FileInfo(
+                        path=file.path,
+                        is_dir=file.is_directory,
+                        file_size=file.file_size,
+                        modification_time=file.last_modified,
+                    )
 
     def delete(self, *, recursive=False):
         if self.is_dir:
@@ -473,13 +496,13 @@ def delete(self, *, recursive=False):
             self._api.delete(self.as_string)
 
     def __repr__(self) -> str:
-        return f'<_VolumesPath {self._path}>'
+        return f"<_VolumesPath {self._path}>"
 
 
 class _DbfsPath(_Path):
 
     def __init__(self, api: files.DbfsAPI, src: str):
-        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
+        self._path = pathlib.PurePosixPath(str(src).replace("dbfs:", "").replace("file:", ""))
         self._api = api
 
     def _is_local(self) -> bool:
@@ -510,16 +533,23 @@ def exists(self) -> bool:
             return False
 
     def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO:
-        return _DbfsIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
+        return _DbfsIO(
+            self._api,
+            self.as_string,
+            read=read,
+            write=write,
+            overwrite=overwrite,
+        )
 
     def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:
             meta = self._api.get_status(self.as_string)
-            yield files.FileInfo(path=self.as_string,
-                                 is_dir=False,
-                                 file_size=meta.file_size,
-                                 modification_time=meta.modification_time,
-                                 )
+            yield files.FileInfo(
+                path=self.as_string,
+                is_dir=False,
+                file_size=meta.file_size,
+                modification_time=meta.modification_time,
+            )
             return
         queue = deque([self])
         while queue:
@@ -534,7 +564,7 @@ def delete(self, *, recursive=False):
         self._api.delete(self.as_string, recursive=recursive)
 
     def __repr__(self) -> str:
-        return f'<_DbfsPath {self._path}>'
+        return f"<_DbfsPath {self._path}>"
 
 
 class DbfsExt(files.DbfsAPI):
@@ -545,12 +575,14 @@ def __init__(self, api_client):
         self._files_api = files.FilesAPI(api_client)
         self._dbfs_api = files.DbfsAPI(api_client)
 
-    def open(self,
-             path: str,
-             *,
-             read: bool = False,
-             write: bool = False,
-             overwrite: bool = False) -> BinaryIO:
+    def open(
+        self,
+        path: str,
+        *,
+        read: bool = False,
+        write: bool = False,
+        overwrite: bool = False,
+    ) -> BinaryIO:
         return self._path(path).open(read=read, write=write, overwrite=overwrite)
 
     def upload(self, path: str, src: BinaryIO, *, overwrite: bool = False):
@@ -588,17 +620,18 @@ def exists(self, path: str) -> bool:
         p = self._path(path)
         return p.exists()
 
-    __ALLOWED_SCHEMES = [None, 'file', 'dbfs']
+    __ALLOWED_SCHEMES = [None, "file", "dbfs"]
 
     def _path(self, src):
         src = parse.urlparse(str(src))
         if src.scheme and src.scheme not in self.__ALLOWED_SCHEMES:
             raise ValueError(
                 f'unsupported scheme "{src.scheme}". DBUtils in the SDK only supports local, root DBFS, and '
-                'UC Volumes paths, not external locations or DBFS mount points.')
-        if src.scheme == 'file':
+                "UC Volumes paths, not external locations or DBFS mount points."
+            )
+        if src.scheme == "file":
             return _LocalPath(src.geturl())
-        if src.path.startswith('/Volumes'):
+        if src.path.startswith("/Volumes"):
             return _VolumesPath(self._files_api, src.geturl())
         return _DbfsPath(self._dbfs_api, src.geturl())
 
@@ -607,7 +640,7 @@ def copy(self, src: str, dst: str, *, recursive=False, overwrite=False):
         src = self._path(src)
         dst = self._path(dst)
         if src.is_local and dst.is_local:
-            raise IOError('both destinations are on local FS')
+            raise IOError("both destinations are on local FS")
         if dst.exists() and dst.is_dir:
             # if the target is a folder, create a file with the same name there
             dst = dst.child(src.name)
@@ -630,11 +663,11 @@ def move_(self, src: str, dst: str, *, recursive=False, overwrite=False):
             # this operation is recursive by default.
             return self.move(source.as_string, target.as_string)
         if source.is_local and target.is_local:
-            raise IOError('both destinations are on local FS')
+            raise IOError("both destinations are on local FS")
         if source.is_dir and not recursive:
-            src_type = 'local' if source.is_local else 'DBFS' if source.is_dbfs else 'UC Volume'
-            dst_type = 'local' if target.is_local else 'DBFS' if target.is_dbfs else 'UC Volume'
-            raise IOError(f'moving a directory from {src_type} to {dst_type} requires recursive flag')
+            src_type = "local" if source.is_local else "DBFS" if source.is_dbfs else "UC Volume"
+            dst_type = "local" if target.is_local else "DBFS" if target.is_dbfs else "UC Volume"
+            raise IOError(f"moving a directory from {src_type} to {dst_type} requires recursive flag")
         # do cross-fs moving
         self.copy(src, dst, recursive=recursive, overwrite=overwrite)
         self.delete(src, recursive=recursive)
@@ -643,7 +676,7 @@ def delete(self, path: str, *, recursive=False):
         """Delete file or directory on DBFS"""
         p = self._path(path)
         if p.is_dir and not recursive:
-            raise IOError('deleting directories requires recursive flag')
+            raise IOError("deleting directories requires recursive flag")
         p.delete(recursive=recursive)
 
 
@@ -670,56 +703,77 @@ def download(self, file_path: str) -> DownloadResponse:
         :returns: :class:`DownloadResponse`
         """
 
-        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
-                                                                       start_byte_offset=0,
-                                                                       if_unmodified_since_timestamp=None)
+        initial_response: DownloadResponse = self._download_raw_stream(
+            file_path=file_path,
+            start_byte_offset=0,
+            if_unmodified_since_timestamp=None,
+        )
 
         wrapped_response = self._wrap_stream(file_path, initial_response)
         initial_response.contents._response = wrapped_response
         return initial_response
 
-    def _download_raw_stream(self,
-                             file_path: str,
-                             start_byte_offset: int,
-                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
-        headers = {'Accept': 'application/octet-stream', }
+    def _download_raw_stream(
+        self,
+        file_path: str,
+        start_byte_offset: int,
+        if_unmodified_since_timestamp: Optional[str] = None,
+    ) -> DownloadResponse:
+        headers = {
+            "Accept": "application/octet-stream",
+        }
 
         if start_byte_offset and not if_unmodified_since_timestamp:
             raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
 
         if start_byte_offset:
-            headers['Range'] = f'bytes={start_byte_offset}-'
+            headers["Range"] = f"bytes={start_byte_offset}-"
 
         if if_unmodified_since_timestamp:
-            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
-
-        response_headers = ['content-length', 'content-type', 'last-modified', ]
-        res = self._api.do('GET',
-                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
-                           headers=headers,
-                           response_headers=response_headers,
-                           raw=True)
+            headers["If-Unmodified-Since"] = if_unmodified_since_timestamp
+
+        response_headers = [
+            "content-length",
+            "content-type",
+            "last-modified",
+        ]
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}",
+            headers=headers,
+            response_headers=response_headers,
+            raw=True,
+        )
 
         result = DownloadResponse.from_dict(res)
         if not isinstance(result.contents, _StreamingResponse):
-            raise Exception("Internal error: response contents is of unexpected type: " +
-                            type(result.contents).__name__)
+            raise Exception(
+                "Internal error: response contents is of unexpected type: " + type(result.contents).__name__
+            )
 
         return result
 
     def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
         underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
-        return _ResilientResponse(self,
-                                  file_path,
-                                  downloadResponse.last_modified,
-                                  offset=0,
-                                  underlying_response=underlying_response)
+        return _ResilientResponse(
+            self,
+            file_path,
+            downloadResponse.last_modified,
+            offset=0,
+            underlying_response=underlying_response,
+        )
 
 
 class _ResilientResponse(_RawResponse):
 
-    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
-                 underlying_response: _RawResponse):
+    def __init__(
+        self,
+        api: FilesExt,
+        file_path: str,
+        file_last_modified: str,
+        offset: int,
+        underlying_response: _RawResponse,
+    ):
         self.api = api
         self.file_path = file_path
         self.underlying_response = underlying_response
@@ -728,11 +782,17 @@ def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offse
 
     def iter_content(self, chunk_size=1, decode_unicode=False):
         if decode_unicode:
-            raise ValueError('Decode unicode is not supported')
+            raise ValueError("Decode unicode is not supported")
 
         iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
-        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
-                                           self.api, chunk_size)
+        self.iterator = _ResilientIterator(
+            iterator,
+            self.file_path,
+            self.file_last_modified,
+            self.offset,
+            self.api,
+            chunk_size,
+        )
         return self.iterator
 
     def close(self):
@@ -744,12 +804,21 @@ class _ResilientIterator(Iterator):
     # and recovers from failures by requesting download from the current offset.
 
     @staticmethod
-    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
-        streaming_response: _StreamingResponse = download_response.contents # this is an instance of _StreamingResponse
+    def _extract_raw_response(
+        download_response: DownloadResponse,
+    ) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents  # this is an instance of _StreamingResponse
         return streaming_response._response
 
-    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
-                 api: FilesExt, chunk_size: int):
+    def __init__(
+        self,
+        underlying_iterator,
+        file_path: str,
+        file_last_modified: str,
+        offset: int,
+        api: FilesExt,
+        chunk_size: int,
+    ):
         self._underlying_iterator = underlying_iterator
         self._api = api
         self._file_path = file_path
@@ -768,14 +837,18 @@ def _should_recover(self) -> bool:
         if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
             _LOG.debug("Total recovers limit exceeded")
             return False
-        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+        if (
+            self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None
+            and self._recovers_without_progressing_count
+            >= self._api._config.files_api_client_download_max_total_recovers_without_progressing
+        ):
             _LOG.debug("No progression recovers limit exceeded")
             return False
         return True
 
     def _recover(self) -> bool:
         if not self._should_recover():
-            return False # recover suppressed, rethrow original exception
+            return False  # recover suppressed, rethrow original exception
 
         self._total_recovers_count += 1
         self._recovers_without_progressing_count += 1
@@ -786,15 +859,15 @@ def _recover(self) -> bool:
             _LOG.debug("Trying to recover from offset " + str(self._offset))
 
             # following call includes all the required network retries
-            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
-                                                              self._file_last_modified)
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset, self._file_last_modified)
             underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
-            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
-                                                                         decode_unicode=False)
+            self._underlying_iterator = underlying_response.iter_content(
+                chunk_size=self._chunk_size, decode_unicode=False
+            )
             _LOG.debug("Recover succeeded")
             return True
         except:
-            return False # recover failed, rethrow original exception
+            return False  # recover failed, rethrow original exception
 
     def __next__(self):
         if self._closed:
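For quick reference, a minimal usage sketch of the DbfsExt and FilesExt mixins reformatted above (a sketch only: it assumes a configured WorkspaceClient, all paths are placeholders, and the streaming contents object is read in a single call):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# DBFS I/O goes through the scheme-aware path helpers (_LocalPath/_VolumesPath/_DbfsPath).
with w.dbfs.open("dbfs:/tmp/sdk-example.txt", write=True, overwrite=True) as f:
    f.write(b"hello from the SDK\n")
w.dbfs.copy("dbfs:/tmp/sdk-example.txt", "file:/tmp/sdk-example.txt", overwrite=True)

# Download from a UC Volume; the returned stream recovers from dropped
# connections via _ResilientIterator.
resp = w.files.download("/Volumes/main/default/my_volume/data.csv")
data = resp.contents.read()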
diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py
index 907310860..1e6cf25d5 100644
--- a/databricks/sdk/mixins/jobs.py
+++ b/databricks/sdk/mixins/jobs.py
@@ -6,13 +6,15 @@
 
 class JobsExt(jobs.JobsAPI):
 
-    def list(self,
-             *,
-             expand_tasks: Optional[bool] = None,
-             limit: Optional[int] = None,
-             name: Optional[str] = None,
-             offset: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[BaseJob]:
+    def list(
+        self,
+        *,
+        expand_tasks: Optional[bool] = None,
+        limit: Optional[int] = None,
+        name: Optional[str] = None,
+        offset: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[BaseJob]:
         """List jobs.
 
         Retrieves a list of jobs. If the job has multiple pages of tasks, job_clusters, parameters or environments,
@@ -36,11 +38,13 @@ def list(self,
         :returns: Iterator over :class:`BaseJob`
         """
         # fetch jobs with limited elements in top level arrays
-        jobs_list = super().list(expand_tasks=expand_tasks,
-                                 limit=limit,
-                                 name=name,
-                                 offset=offset,
-                                 page_token=page_token)
+        jobs_list = super().list(
+            expand_tasks=expand_tasks,
+            limit=limit,
+            name=name,
+            offset=offset,
+            page_token=page_token,
+        )
         if not expand_tasks:
             yield from jobs_list
 
@@ -55,22 +59,24 @@ def list(self,
             # Remove has_more fields for each job in the list.
             # This field in Jobs API 2.2 is useful for pagination. It indicates if there are more than 100 tasks or job_clusters in the job.
             # This function hides pagination details from the user, so the field serves no useful purpose here.
-            if hasattr(job, 'has_more'):
-                delattr(job, 'has_more')
+            if hasattr(job, "has_more"):
+                delattr(job, "has_more")
             yield job
 
-    def list_runs(self,
-                  *,
-                  active_only: Optional[bool] = None,
-                  completed_only: Optional[bool] = None,
-                  expand_tasks: Optional[bool] = None,
-                  job_id: Optional[int] = None,
-                  limit: Optional[int] = None,
-                  offset: Optional[int] = None,
-                  page_token: Optional[str] = None,
-                  run_type: Optional[RunType] = None,
-                  start_time_from: Optional[int] = None,
-                  start_time_to: Optional[int] = None) -> Iterator[BaseRun]:
+    def list_runs(
+        self,
+        *,
+        active_only: Optional[bool] = None,
+        completed_only: Optional[bool] = None,
+        expand_tasks: Optional[bool] = None,
+        job_id: Optional[int] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        page_token: Optional[str] = None,
+        run_type: Optional[RunType] = None,
+        start_time_from: Optional[int] = None,
+        start_time_to: Optional[int] = None,
+    ) -> Iterator[BaseRun]:
         """List job runs.
 
         List runs in descending order by start time. If the job has multiple pages of tasks, job_clusters, parameters or repair history,
@@ -109,16 +115,18 @@ def list_runs(self,
         :returns: Iterator over :class:`BaseRun`
         """
         # fetch runs with limited elements in top level arrays
-        runs_list = super().list_runs(active_only=active_only,
-                                      completed_only=completed_only,
-                                      expand_tasks=expand_tasks,
-                                      job_id=job_id,
-                                      limit=limit,
-                                      offset=offset,
-                                      page_token=page_token,
-                                      run_type=run_type,
-                                      start_time_from=start_time_from,
-                                      start_time_to=start_time_to)
+        runs_list = super().list_runs(
+            active_only=active_only,
+            completed_only=completed_only,
+            expand_tasks=expand_tasks,
+            job_id=job_id,
+            limit=limit,
+            offset=offset,
+            page_token=page_token,
+            run_type=run_type,
+            start_time_from=start_time_from,
+            start_time_to=start_time_to,
+        )
 
         if not expand_tasks:
             yield from runs_list
@@ -134,16 +142,18 @@ def list_runs(self,
             # Remove has_more fields for each run in the list.
             # This field in Jobs API 2.2 is useful for pagination. It indicates if there are more than 100 tasks or job_clusters in the run.
             # This function hides pagination details from the user, so the field serves no useful purpose here.
-            if hasattr(run, 'has_more'):
-                delattr(run, 'has_more')
+            if hasattr(run, "has_more"):
+                delattr(run, "has_more")
             yield run
 
-    def get_run(self,
-                run_id: int,
-                *,
-                include_history: Optional[bool] = None,
-                include_resolved_values: Optional[bool] = None,
-                page_token: Optional[str] = None) -> jobs.Run:
+    def get_run(
+        self,
+        run_id: int,
+        *,
+        include_history: Optional[bool] = None,
+        include_resolved_values: Optional[bool] = None,
+        page_token: Optional[str] = None,
+    ) -> jobs.Run:
         """Get a single job run.
 
         Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all pages of tasks, iterations, job_clusters, job_parameters, and repair history.
@@ -160,10 +170,12 @@ def get_run(self,
 
         :returns: :class:`Run`
         """
-        run = super().get_run(run_id,
-                              include_history=include_history,
-                              include_resolved_values=include_resolved_values,
-                              page_token=page_token)
+        run = super().get_run(
+            run_id,
+            include_history=include_history,
+            include_resolved_values=include_resolved_values,
+            page_token=page_token,
+        )
 
         # When querying a Job run, a page token is returned when there are more than 100 tasks. No iterations are defined for a Job run. Therefore, the next page in the response only includes the next page of tasks.
         # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. Therefore, the client only reads the iterations from the next page and not the tasks.
@@ -171,10 +183,12 @@ def get_run(self,
 
         # runs/get response includes next_page_token as long as there are more pages to fetch.
         while run.next_page_token is not None:
-            next_run = super().get_run(run_id,
-                                       include_history=include_history,
-                                       include_resolved_values=include_resolved_values,
-                                       page_token=run.next_page_token)
+            next_run = super().get_run(
+                run_id,
+                include_history=include_history,
+                include_resolved_values=include_resolved_values,
+                page_token=run.next_page_token,
+            )
             if is_paginating_iterations:
                 run.iterations.extend(next_run.iterations)
             else:
@@ -213,4 +227,4 @@ def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
             job.settings.environments.extend(next_job.settings.environments)
             job.next_page_token = next_job.next_page_token
 
-        return job
\ No newline at end of file
+        return job
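For quick reference, a minimal usage sketch of the JobsExt pagination helpers reformatted above (a sketch only: it assumes a configured WorkspaceClient, and the job and run ids are placeholders):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Tasks and job_clusters are stitched together across pages behind the scenes,
# and the internal has_more marker is stripped before each run is yielded.
for run in w.jobs.list_runs(job_id=1234, expand_tasks=True):
    print(run.run_id, len(run.tasks or []))

# get_run paginates tasks/iterations/job_clusters automatically as well.
full_run = w.jobs.get_run(run_id=5678)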
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
index e5bea9607..62835dcec 100644
--- a/databricks/sdk/mixins/open_ai_client.py
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -40,8 +40,9 @@ def get_open_ai_client(self):
 
         return OpenAI(
             base_url=self._api._cfg.host + "/serving-endpoints",
-            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
-            http_client=self._get_authorized_http_client())
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client(),
+        )
 
     def get_langchain_chat_open_ai_client(self, model):
         try:
@@ -54,17 +55,20 @@ def get_langchain_chat_open_ai_client(self, model):
         return ChatOpenAI(
             model=model,
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
-            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
-            http_client=self._get_authorized_http_client())
-
-    def http_request(self,
-                     conn: str,
-                     method: ExternalFunctionRequestHttpMethod,
-                     path: str,
-                     *,
-                     headers: Optional[Dict[str, str]] = None,
-                     json: Optional[Dict[str, str]] = None,
-                     params: Optional[Dict[str, str]] = None) -> Response:
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client(),
+        )
+
+    def http_request(
+        self,
+        conn: str,
+        method: ExternalFunctionRequestHttpMethod,
+        path: str,
+        *,
+        headers: Optional[Dict[str, str]] = None,
+        json: Optional[Dict[str, str]] = None,
+        params: Optional[Dict[str, str]] = None,
+    ) -> Response:
         """Make a call to external services using the credentials stored in a UC Connection.
         **NOTE:** Experimental: This API may change or be removed in a future release without warning.
         :param conn: str
@@ -84,16 +88,18 @@ def http_request(self,
         """
         response = Response()
         response.status_code = 200
-        server_response = super().http_request(connection_name=conn,
-                                               method=method,
-                                               path=path,
-                                               headers=js.dumps(headers) if headers is not None else None,
-                                               json=js.dumps(json) if json is not None else None,
-                                               params=js.dumps(params) if params is not None else None)
+        server_response = super().http_request(
+            connection_name=conn,
+            method=method,
+            path=path,
+            headers=js.dumps(headers) if headers is not None else None,
+            json=js.dumps(json) if json is not None else None,
+            params=js.dumps(params) if params is not None else None,
+        )
 
         # Read the content from the HttpRequestResponse object
         if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
-            raw_content = server_response.contents.read() # Read the bytes
+            raw_content = server_response.contents.read()  # Read the bytes
         else:
             raise ValueError("Invalid response from the server.")
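For quick reference, a minimal usage sketch of the serving-endpoints OpenAI client mixin reformatted above (a sketch only: it assumes the openai package is installed and a configured WorkspaceClient; the model name is a placeholder for an existing serving endpoint):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
client = w.serving_endpoints.get_open_ai_client()

completion = client.chat.completions.create(
    model="databricks-meta-llama-3-3-70b-instruct",  # placeholder endpoint name
    messages=[{"role": "user", "content": "Hello!"}],
)
print(completion.choices[0].message.content)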
 
diff --git a/databricks/sdk/mixins/workspace.py b/databricks/sdk/mixins/workspace.py
index 8d51ef2bc..7476a4b83 100644
--- a/databricks/sdk/mixins/workspace.py
+++ b/databricks/sdk/mixins/workspace.py
@@ -6,18 +6,20 @@
 
 
 def _fqcn(x: any) -> str:
-    return f'{x.__module__}.{x.__name__}'
+    return f"{x.__module__}.{x.__name__}"
 
 
 class WorkspaceExt(WorkspaceAPI):
     __doc__ = WorkspaceAPI.__doc__
 
-    def list(self,
-             path: str,
-             *,
-             notebooks_modified_after: Optional[int] = None,
-             recursive: Optional[bool] = False,
-             **kwargs) -> Iterator[ObjectInfo]:
+    def list(
+        self,
+        path: str,
+        *,
+        notebooks_modified_after: Optional[int] = None,
+        recursive: Optional[bool] = False,
+        **kwargs,
+    ) -> Iterator[ObjectInfo]:
         """List workspace objects
 
         :param recursive: bool
@@ -35,13 +37,15 @@ def list(self,
                     continue
                 yield object_info
 
-    def upload(self,
-               path: str,
-               content: Union[bytes, BinaryIO],
-               *,
-               format: Optional[ImportFormat] = None,
-               language: Optional[Language] = None,
-               overwrite: Optional[bool] = False) -> None:
+    def upload(
+        self,
+        path: str,
+        content: Union[bytes, BinaryIO],
+        *,
+        format: Optional[ImportFormat] = None,
+        language: Optional[Language] = None,
+        overwrite: Optional[bool] = False,
+    ) -> None:
         """
         Uploads a workspace object (for example, a notebook or file) or the contents of an entire
         directory (`DBC` format).
@@ -60,31 +64,37 @@ def upload(self,
         :param language: Only required if using `ExportFormat.SOURCE`.
         """
         if format is not None and not isinstance(format, ImportFormat):
-            raise ValueError(
-                f'format is expected to be {_fqcn(ImportFormat)}, but got {_fqcn(format.__class__)}')
+            raise ValueError(f"format is expected to be {_fqcn(ImportFormat)}, but got {_fqcn(format.__class__)}")
         if (not format or format == ImportFormat.SOURCE) and not language:
             suffixes = {
-                '.py': Language.PYTHON,
-                '.sql': Language.SQL,
-                '.scala': Language.SCALA,
-                '.R': Language.R
+                ".py": Language.PYTHON,
+                ".sql": Language.SQL,
+                ".scala": Language.SCALA,
+                ".R": Language.R,
             }
             for sfx, lang in suffixes.items():
                 if path.endswith(sfx):
                     language = lang
                     break
         if language is not None and not isinstance(language, Language):
-            raise ValueError(
-                f'language is expected to be {_fqcn(Language)}, but got {_fqcn(language.__class__)}')
-        data = {'path': path}
-        if format: data['format'] = format.value
-        if language: data['language'] = language.value
-        if overwrite: data['overwrite'] = 'true'
+            raise ValueError(f"language is expected to be {_fqcn(Language)}, but got {_fqcn(language.__class__)}")
+        data = {"path": path}
+        if format:
+            data["format"] = format.value
+        if language:
+            data["language"] = language.value
+        if overwrite:
+            data["overwrite"] = "true"
         try:
-            return self._api.do('POST', '/api/2.0/workspace/import', files={'content': content}, data=data)
+            return self._api.do(
+                "POST",
+                "/api/2.0/workspace/import",
+                files={"content": content},
+                data=data,
+            )
         except DatabricksError as e:
-            if e.error_code == 'INVALID_PARAMETER_VALUE':
-                msg = f'Perhaps you forgot to specify the `format=ImportFormat.AUTO`. {e}'
+            if e.error_code == "INVALID_PARAMETER_VALUE":
+                msg = f"Perhaps you forgot to specify the `format=ImportFormat.AUTO`. {e}"
                 raise DatabricksError(message=msg, error_code=e.error_code)
             else:
                 raise e
@@ -100,7 +110,8 @@ def download(self, path: str, *, format: Optional[ExportFormat] = None) -> Binar
                          the request.
         :return:         file-like `io.BinaryIO` of the `path` contents.
         """
-        query = {'path': path, 'direct_download': 'true'}
-        if format: query['format'] = format.value
-        response = self._api.do('GET', '/api/2.0/workspace/export', query=query, raw=True)
+        query = {"path": path, "direct_download": "true"}
+        if format:
+            query["format"] = format.value
+        response = self._api.do("GET", "/api/2.0/workspace/export", query=query, raw=True)
         return response["contents"]
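For quick reference, a minimal usage sketch of the WorkspaceExt upload/download helpers reformatted above (a sketch only: it assumes a configured WorkspaceClient, and the workspace path is a placeholder):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ExportFormat

w = WorkspaceClient()

# Language is inferred from the .py suffix; the format defaults to SOURCE.
w.workspace.upload("/Users/someone@example.com/hello.py", b"print('hello')", overwrite=True)

exported = w.workspace.download("/Users/someone@example.com/hello.py", format=ExportFormat.SOURCE).read()
print(exported.decode())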
diff --git a/databricks/sdk/oauth.py b/databricks/sdk/oauth.py
index c9a9d15c6..4685efa5c 100644
--- a/databricks/sdk/oauth.py
+++ b/databricks/sdk/oauth.py
@@ -23,7 +23,7 @@
 
 # Error code for PKCE flow in Azure Active Directory, that gets additional retry.
 # See https://stackoverflow.com/a/75466778/277035 for more info
-NO_ORIGIN_FOR_SPA_CLIENT_ERROR = 'AADSTS9002327'
+NO_ORIGIN_FOR_SPA_CLIENT_ERROR = "AADSTS9002327"
 
 URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
 JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
@@ -54,20 +54,25 @@ class OidcEndpoints:
     The endpoints used for OAuth-based authentication in Databricks.
     """
 
-    authorization_endpoint: str # ../v1/authorize
+    authorization_endpoint: str  # ../v1/authorize
     """The authorization endpoint for the OAuth flow. The user-agent should be directed to this endpoint in order for
     the user to log in and authorize the client for user-to-machine (U2M) flows."""
 
-    token_endpoint: str # ../v1/token
+    token_endpoint: str  # ../v1/token
     """The token endpoint for the OAuth flow."""
 
     @staticmethod
-    def from_dict(d: dict) -> 'OidcEndpoints':
-        return OidcEndpoints(authorization_endpoint=d.get('authorization_endpoint'),
-                             token_endpoint=d.get('token_endpoint'))
+    def from_dict(d: dict) -> "OidcEndpoints":
+        return OidcEndpoints(
+            authorization_endpoint=d.get("authorization_endpoint"),
+            token_endpoint=d.get("token_endpoint"),
+        )
 
     def as_dict(self) -> dict:
-        return {'authorization_endpoint': self.authorization_endpoint, 'token_endpoint': self.token_endpoint}
+        return {
+            "authorization_endpoint": self.authorization_endpoint,
+            "token_endpoint": self.token_endpoint,
+        }
 
 
 @dataclass
@@ -93,19 +98,24 @@ def valid(self):
         return self.access_token and not self.expired
 
     def as_dict(self) -> dict:
-        raw = {'access_token': self.access_token, 'token_type': self.token_type}
+        raw = {
+            "access_token": self.access_token,
+            "token_type": self.token_type,
+        }
         if self.expiry:
-            raw['expiry'] = self.expiry.isoformat()
+            raw["expiry"] = self.expiry.isoformat()
         if self.refresh_token:
-            raw['refresh_token'] = self.refresh_token
+            raw["refresh_token"] = self.refresh_token
         return raw
 
     @staticmethod
-    def from_dict(raw: dict) -> 'Token':
-        return Token(access_token=raw['access_token'],
-                     token_type=raw['token_type'],
-                     expiry=datetime.fromisoformat(raw['expiry']),
-                     refresh_token=raw.get('refresh_token'))
+    def from_dict(raw: dict) -> "Token":
+        return Token(
+            access_token=raw["access_token"],
+            token_type=raw["token_type"],
+            expiry=datetime.fromisoformat(raw["expiry"]),
+            refresh_token=raw.get("refresh_token"),
+        )
 
     def jwt_claims(self) -> Dict[str, str]:
         """Get claims from the access token or return an empty dictionary if it is not a JWT token.
@@ -133,7 +143,7 @@ def jwt_claims(self) -> Dict[str, str]:
         try:
             jwt_split = self.access_token.split(".")
             if len(jwt_split) != 3:
-                logger.debug(f'Tried to decode access token as JWT, but failed: {len(jwt_split)} components')
+                logger.debug(f"Tried to decode access token as JWT, but failed: {len(jwt_split)} components")
                 return {}
             payload_with_padding = jwt_split[1] + "=="
             payload_bytes = base64.standard_b64decode(payload_with_padding)
@@ -141,7 +151,7 @@ def jwt_claims(self) -> Dict[str, str]:
             claims = json.loads(payload_json)
             return claims
         except ValueError as err:
-            logger.debug(f'Tried to decode access token as JWT, but failed: {err}')
+            logger.debug(f"Tried to decode access token as JWT, but failed: {err}")
             return {}
 
 
@@ -152,17 +162,21 @@ def token(self) -> Token:
         pass
 
 
-def retrieve_token(client_id,
-                   client_secret,
-                   token_url,
-                   params,
-                   use_params=False,
-                   use_header=False,
-                   headers=None) -> Token:
-    logger.debug(f'Retrieving token for {client_id}')
+def retrieve_token(
+    client_id,
+    client_secret,
+    token_url,
+    params,
+    use_params=False,
+    use_header=False,
+    headers=None,
+) -> Token:
+    logger.debug(f"Retrieving token for {client_id}")
     if use_params:
-        if client_id: params["client_id"] = client_id
-        if client_secret: params["client_secret"] = client_secret
+        if client_id:
+            params["client_id"] = client_id
+        if client_secret:
+            params["client_secret"] = client_secret
     auth = None
     if use_header:
         auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
@@ -170,21 +184,23 @@ def retrieve_token(client_id,
         auth = IgnoreNetrcAuth()
     resp = requests.post(token_url, params, auth=auth, headers=headers)
     if not resp.ok:
-        if resp.headers['Content-Type'].startswith('application/json'):
+        if resp.headers["Content-Type"].startswith("application/json"):
             err = resp.json()
-            code = err.get('errorCode', err.get('error', 'unknown'))
-            summary = err.get('errorSummary', err.get('error_description', 'unknown'))
-            summary = summary.replace("\r\n", ' ')
-            raise ValueError(f'{code}: {summary}')
+            code = err.get("errorCode", err.get("error", "unknown"))
+            summary = err.get("errorSummary", err.get("error_description", "unknown"))
+            summary = summary.replace("\r\n", " ")
+            raise ValueError(f"{code}: {summary}")
         raise ValueError(resp.content)
     try:
         j = resp.json()
         expires_in = int(j["expires_in"])
         expiry = datetime.now() + timedelta(seconds=expires_in)
-        return Token(access_token=j["access_token"],
-                     refresh_token=j.get('refresh_token'),
-                     token_type=j["token_type"],
-                     expiry=expiry)
+        return Token(
+            access_token=j["access_token"],
+            refresh_token=j.get("refresh_token"),
+            token_type=j["token_type"],
+            expiry=expiry,
+        )
     except Exception as e:
         raise NotImplementedError(f"Not supported yet: {e}")
 
@@ -197,9 +213,10 @@ class _TokenState(Enum):
       - STALE: The token is valid but will expire soon.
       - EXPIRED: The token has expired and cannot be used.
     """
-    FRESH = 1 # The token is valid.
-    STALE = 2 # The token is valid but will expire soon.
-    EXPIRED = 3 # The token has expired and cannot be used.
+
+    FRESH = 1  # The token is valid.
+    STALE = 2  # The token is valid but will expire soon.
+    EXPIRED = 3  # The token has expired and cannot be used.
 
 
 class Refreshable(TokenSource):
@@ -219,10 +236,12 @@ def _get_executor(cls):
                     cls._EXECUTOR = ThreadPoolExecutor(max_workers=10)
         return cls._EXECUTOR
 
-    def __init__(self,
-                 token: Token = None,
-                 disable_async: bool = True,
-                 stale_duration: timedelta = _DEFAULT_STALE_DURATION):
+    def __init__(
+        self,
+        token: Token = None,
+        disable_async: bool = True,
+        stale_duration: timedelta = _DEFAULT_STALE_DURATION,
+    ):
         # Config properties
         self._stale_duration = stale_duration
         self._disable_async = disable_async
@@ -300,7 +319,7 @@ def _refresh_internal():
                 # This happens on a thread, so we don't want to propagate the error.
                 # Instead, if there is no new_token for any reason, we will disable async refresh below
                 # But we will do it inside the lock.
-                logger.warning(f'Tried to refresh token asynchronously, but failed: {e}')
+                logger.warning(f"Tried to refresh token asynchronously, but failed: {e}")
 
             with self._lock:
                 if new_token is not None:
@@ -332,23 +351,24 @@ def log_message(self, fmt: str, *args: Any) -> None:
 
     def do_GET(self):
         from urllib.parse import parse_qsl
-        parts = self.path.split('?')
+
+        parts = self.path.split("?")
         if len(parts) != 2:
-            self.send_error(400, 'Missing Query')
+            self.send_error(400, "Missing Query")
             return
 
         query = dict(parse_qsl(parts[1]))
         self._feedback.append(query)
 
-        if 'error' in query:
-            self.send_error(400, query['error'], query.get('error_description'))
+        if "error" in query:
+            self.send_error(400, query["error"], query.get("error_description"))
             return
 
         self.send_response(200)
-        self.send_header('Content-type', 'text/html')
+        self.send_header("Content-type", "text/html")
         self.end_headers()
         # TODO: show better message
-        self.wfile.write(b'You can close this tab.')
+        self.wfile.write(b"You can close this tab.")
 
 
 def get_account_endpoints(host: str, account_id: str, client: _BaseClient = _BaseClient()) -> OidcEndpoints:
@@ -359,8 +379,8 @@ def get_account_endpoints(host: str, account_id: str, client: _BaseClient = _Bas
     :return: The account's OIDC endpoints.
     """
     host = _fix_host_if_needed(host)
-    oidc = f'{host}/oidc/accounts/{account_id}/.well-known/oauth-authorization-server'
-    resp = client.do('GET', oidc)
+    oidc = f"{host}/oidc/accounts/{account_id}/.well-known/oauth-authorization-server"
+    resp = client.do("GET", oidc)
     return OidcEndpoints.from_dict(resp)
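For quick reference, a minimal usage sketch of the OIDC discovery helpers above (a sketch only: the host and account id are placeholders, and it assumes network access to the endpoints):

from databricks.sdk.oauth import get_account_endpoints, get_workspace_endpoints

ws_oidc = get_workspace_endpoints("https://my-workspace.cloud.databricks.com")
print(ws_oidc.authorization_endpoint, ws_oidc.token_endpoint)

acct_oidc = get_account_endpoints("https://accounts.cloud.databricks.com", "<account-id>")
print(acct_oidc.token_endpoint)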
 
 
@@ -371,12 +391,14 @@ def get_workspace_endpoints(host: str, client: _BaseClient = _BaseClient()) -> O
     :return: The workspace's OIDC endpoints.
     """
     host = _fix_host_if_needed(host)
-    oidc = f'{host}/oidc/.well-known/oauth-authorization-server'
-    resp = client.do('GET', oidc)
+    oidc = f"{host}/oidc/.well-known/oauth-authorization-server"
+    resp = client.do("GET", oidc)
     return OidcEndpoints.from_dict(resp)
 
 
-def get_azure_entra_id_workspace_endpoints(host: str) -> Optional[OidcEndpoints]:
+def get_azure_entra_id_workspace_endpoints(
+    host: str,
+) -> Optional[OidcEndpoints]:
     """
     Get the Azure Entra ID endpoints for a given workspace. Can only be used when authenticating to Azure Databricks
     using an application registered in Azure Entra ID.
@@ -385,22 +407,26 @@ def get_azure_entra_id_workspace_endpoints(host: str) -> Optional[OidcEndpoints]
     """
     # In Azure, this workspace endpoint redirects to the Entra ID authorization endpoint
     host = _fix_host_if_needed(host)
-    res = requests.get(f'{host}/oidc/oauth2/v2.0/authorize', allow_redirects=False)
-    real_auth_url = res.headers.get('location')
+    res = requests.get(f"{host}/oidc/oauth2/v2.0/authorize", allow_redirects=False)
+    real_auth_url = res.headers.get("location")
     if not real_auth_url:
         return None
-    return OidcEndpoints(authorization_endpoint=real_auth_url,
-                         token_endpoint=real_auth_url.replace('/authorize', '/token'))
+    return OidcEndpoints(
+        authorization_endpoint=real_auth_url,
+        token_endpoint=real_auth_url.replace("/authorize", "/token"),
+    )
 
 
 class SessionCredentials(Refreshable):
 
-    def __init__(self,
-                 token: Token,
-                 token_endpoint: str,
-                 client_id: str,
-                 client_secret: str = None,
-                 redirect_url: str = None):
+    def __init__(
+        self,
+        token: Token,
+        token_endpoint: str,
+        client_id: str,
+        client_secret: str = None,
+        redirect_url: str = None,
+    ):
         self._token_endpoint = token_endpoint
         self._client_id = client_id
         self._client_secret = client_secret
@@ -408,61 +434,72 @@ def __init__(self,
         super().__init__(token)
 
     def as_dict(self) -> dict:
-        return {'token': self.token().as_dict()}
+        return {"token": self.token().as_dict()}
 
     @staticmethod
-    def from_dict(raw: dict,
-                  token_endpoint: str,
-                  client_id: str,
-                  client_secret: str = None,
-                  redirect_url: str = None) -> 'SessionCredentials':
-        return SessionCredentials(token=Token.from_dict(raw['token']),
-                                  token_endpoint=token_endpoint,
-                                  client_id=client_id,
-                                  client_secret=client_secret,
-                                  redirect_url=redirect_url)
+    def from_dict(
+        raw: dict,
+        token_endpoint: str,
+        client_id: str,
+        client_secret: str = None,
+        redirect_url: str = None,
+    ) -> "SessionCredentials":
+        return SessionCredentials(
+            token=Token.from_dict(raw["token"]),
+            token_endpoint=token_endpoint,
+            client_id=client_id,
+            client_secret=client_secret,
+            redirect_url=redirect_url,
+        )
 
     def auth_type(self):
         """Implementing CredentialsProvider protocol"""
         # TODO: distinguish between Databricks IDP and Azure AD
-        return 'oauth'
+        return "oauth"
 
     def __call__(self, *args, **kwargs):
         """Implementing CredentialsProvider protocol"""
 
         def inner() -> Dict[str, str]:
-            return {'Authorization': f"Bearer {self.token().access_token}"}
+            return {"Authorization": f"Bearer {self.token().access_token}"}
 
         return inner
 
     def refresh(self) -> Token:
         refresh_token = self._token.refresh_token
         if not refresh_token:
-            raise ValueError('oauth2: token expired and refresh token is not set')
-        params = {'grant_type': 'refresh_token', 'refresh_token': refresh_token}
+            raise ValueError("oauth2: token expired and refresh token is not set")
+        params = {
+            "grant_type": "refresh_token",
+            "refresh_token": refresh_token,
+        }
         headers = {}
-        if 'microsoft' in self._token_endpoint:
+        if "microsoft" in self._token_endpoint:
             # Tokens issued for the 'Single-Page Application' client-type may
             # only be redeemed via cross-origin requests
-            headers = {'Origin': self._redirect_url}
-        return retrieve_token(client_id=self._client_id,
-                              client_secret=self._client_secret,
-                              token_url=self._token_endpoint,
-                              params=params,
-                              use_params=True,
-                              headers=headers)
+            headers = {"Origin": self._redirect_url}
+        return retrieve_token(
+            client_id=self._client_id,
+            client_secret=self._client_secret,
+            token_url=self._token_endpoint,
+            params=params,
+            use_params=True,
+            headers=headers,
+        )
 
 
 class Consent:
 
-    def __init__(self,
-                 state: str,
-                 verifier: str,
-                 authorization_url: str,
-                 redirect_url: str,
-                 token_endpoint: str,
-                 client_id: str,
-                 client_secret: str = None) -> None:
+    def __init__(
+        self,
+        state: str,
+        verifier: str,
+        authorization_url: str,
+        redirect_url: str,
+        token_endpoint: str,
+        client_id: str,
+        client_secret: str = None,
+    ) -> None:
         self._verifier = verifier
         self._state = state
         self._authorization_url = authorization_url
@@ -473,12 +510,12 @@ def __init__(self,
 
     def as_dict(self) -> dict:
         return {
-            'state': self._state,
-            'verifier': self._verifier,
-            'authorization_url': self._authorization_url,
-            'redirect_url': self._redirect_url,
-            'token_endpoint': self._token_endpoint,
-            'client_id': self._client_id,
+            "state": self._state,
+            "verifier": self._verifier,
+            "authorization_url": self._authorization_url,
+            "redirect_url": self._redirect_url,
+            "token_endpoint": self._token_endpoint,
+            "client_id": self._client_id,
         }
 
     @property
@@ -486,65 +523,74 @@ def authorization_url(self) -> str:
         return self._authorization_url
 
     @staticmethod
-    def from_dict(raw: dict, client_secret: str = None) -> 'Consent':
-        return Consent(raw['state'],
-                       raw['verifier'],
-                       authorization_url=raw['authorization_url'],
-                       redirect_url=raw['redirect_url'],
-                       token_endpoint=raw['token_endpoint'],
-                       client_id=raw['client_id'],
-                       client_secret=client_secret)
+    def from_dict(raw: dict, client_secret: str = None) -> "Consent":
+        return Consent(
+            raw["state"],
+            raw["verifier"],
+            authorization_url=raw["authorization_url"],
+            redirect_url=raw["redirect_url"],
+            token_endpoint=raw["token_endpoint"],
+            client_id=raw["client_id"],
+            client_secret=client_secret,
+        )
 
     def launch_external_browser(self) -> SessionCredentials:
         redirect_url = urllib.parse.urlparse(self._redirect_url)
-        if redirect_url.hostname not in ('localhost', '127.0.0.1'):
-            raise ValueError(f'cannot listen on {redirect_url.hostname}')
+        if redirect_url.hostname not in ("localhost", "127.0.0.1"):
+            raise ValueError(f"cannot listen on {redirect_url.hostname}")
         feedback = []
-        logger.info(f'Opening {self._authorization_url} in a browser')
+        logger.info(f"Opening {self._authorization_url} in a browser")
         webbrowser.open_new(self._authorization_url)
         port = redirect_url.port
         handler_factory = functools.partial(_OAuthCallback, feedback)
         with HTTPServer(("localhost", port), handler_factory) as httpd:
-            logger.info(f'Waiting for redirect to http://localhost:{port}')
+            logger.info(f"Waiting for redirect to http://localhost:{port}")
             httpd.handle_request()
         if not feedback:
-            raise ValueError('No data received in callback')
+            raise ValueError("No data received in callback")
         query = feedback.pop()
         return self.exchange_callback_parameters(query)
 
     def exchange_callback_parameters(self, query: Dict[str, str]) -> SessionCredentials:
-        if 'error' in query:
-            raise ValueError('{error}: {error_description}'.format(**query))
-        if 'code' not in query or 'state' not in query:
-            raise ValueError('No code returned in callback')
-        return self.exchange(query['code'], query['state'])
+        if "error" in query:
+            raise ValueError("{error}: {error_description}".format(**query))
+        if "code" not in query or "state" not in query:
+            raise ValueError("No code returned in callback")
+        return self.exchange(query["code"], query["state"])
 
     def exchange(self, code: str, state: str) -> SessionCredentials:
         if self._state != state:
-            raise ValueError('state mismatch')
+            raise ValueError("state mismatch")
         params = {
-            'redirect_uri': self._redirect_url,
-            'grant_type': 'authorization_code',
-            'code_verifier': self._verifier,
-            'code': code
+            "redirect_uri": self._redirect_url,
+            "grant_type": "authorization_code",
+            "code_verifier": self._verifier,
+            "code": code,
         }
         headers = {}
         while True:
             try:
-                token = retrieve_token(client_id=self._client_id,
-                                       client_secret=self._client_secret,
-                                       token_url=self._token_endpoint,
-                                       params=params,
-                                       headers=headers,
-                                       use_params=True)
-                return SessionCredentials(token, self._token_endpoint, self._client_id, self._client_secret,
-                                          self._redirect_url)
+                token = retrieve_token(
+                    client_id=self._client_id,
+                    client_secret=self._client_secret,
+                    token_url=self._token_endpoint,
+                    params=params,
+                    headers=headers,
+                    use_params=True,
+                )
+                return SessionCredentials(
+                    token,
+                    self._token_endpoint,
+                    self._client_id,
+                    self._client_secret,
+                    self._redirect_url,
+                )
             except ValueError as e:
                 if NO_ORIGIN_FOR_SPA_CLIENT_ERROR in str(e):
                     # Retry in cases of 'Single-Page Application' client-type with
                     # 'Origin' header equal to client's redirect URL.
-                    headers['Origin'] = self._redirect_url
-                    msg = f'Retrying OAuth token exchange with {self._redirect_url} origin'
+                    headers["Origin"] = self._redirect_url
+                    msg = f"Retrying OAuth token exchange with {self._redirect_url} origin"
                     logger.debug(msg)
                     continue
                 raise e
@@ -569,15 +615,17 @@ class OAuthClient:
     exchange it for a token without possessing the Code Verifier.
     """
 
-    def __init__(self,
-                 oidc_endpoints: OidcEndpoints,
-                 redirect_url: str,
-                 client_id: str,
-                 scopes: List[str] = None,
-                 client_secret: str = None):
+    def __init__(
+        self,
+        oidc_endpoints: OidcEndpoints,
+        redirect_url: str,
+        client_id: str,
+        scopes: List[str] = None,
+        client_secret: str = None,
+    ):
 
         if not scopes:
-            scopes = ['all-apis']
+            scopes = ["all-apis"]
 
         self.redirect_url = redirect_url
         self._client_id = client_id
@@ -586,25 +634,27 @@ def __init__(self,
         self._scopes = scopes
 
     @staticmethod
-    def from_host(host: str,
-                  client_id: str,
-                  redirect_url: str,
-                  *,
-                  scopes: List[str] = None,
-                  client_secret: str = None) -> 'OAuthClient':
+    def from_host(
+        host: str,
+        client_id: str,
+        redirect_url: str,
+        *,
+        scopes: List[str] = None,
+        client_secret: str = None,
+    ) -> "OAuthClient":
         from .core import Config
         from .credentials_provider import credentials_strategy
 
-        @credentials_strategy('noop', [])
+        @credentials_strategy("noop", [])
         def noop_credentials(_: any):
             return lambda: {}
 
         config = Config(host=host, credentials_strategy=noop_credentials)
         if not scopes:
-            scopes = ['all-apis']
+            scopes = ["all-apis"]
         oidc = config.oidc_endpoints
         if not oidc:
-            raise ValueError(f'{host} does not support OAuth')
+            raise ValueError(f"{host} does not support OAuth")
         return OAuthClient(oidc, redirect_url, client_id, scopes, client_secret)
 
     def initiate_consent(self) -> Consent:
@@ -613,28 +663,30 @@ def initiate_consent(self) -> Consent:
         # token_urlsafe() already returns base64-encoded string
         verifier = secrets.token_urlsafe(32)
         digest = hashlib.sha256(verifier.encode("UTF-8")).digest()
-        challenge = (base64.urlsafe_b64encode(digest).decode("UTF-8").replace("=", ""))
+        challenge = base64.urlsafe_b64encode(digest).decode("UTF-8").replace("=", "")
 
         params = {
-            'response_type': 'code',
-            'client_id': self._client_id,
-            'redirect_uri': self.redirect_url,
-            'scope': ' '.join(self._scopes),
-            'state': state,
-            'code_challenge': challenge,
-            'code_challenge_method': 'S256'
+            "response_type": "code",
+            "client_id": self._client_id,
+            "redirect_uri": self.redirect_url,
+            "scope": " ".join(self._scopes),
+            "state": state,
+            "code_challenge": challenge,
+            "code_challenge_method": "S256",
         }
-        auth_url = f'{self._oidc_endpoints.authorization_endpoint}?{urllib.parse.urlencode(params)}'
-        return Consent(state,
-                       verifier,
-                       authorization_url=auth_url,
-                       redirect_url=self.redirect_url,
-                       token_endpoint=self._oidc_endpoints.token_endpoint,
-                       client_id=self._client_id,
-                       client_secret=self._client_secret)
+        auth_url = f"{self._oidc_endpoints.authorization_endpoint}?{urllib.parse.urlencode(params)}"
+        return Consent(
+            state,
+            verifier,
+            authorization_url=auth_url,
+            redirect_url=self.redirect_url,
+            token_endpoint=self._oidc_endpoints.token_endpoint,
+            client_id=self._client_id,
+            client_secret=self._client_secret,
+        )
 
     def __repr__(self) -> str:
-        return f'<OAuthClient {self._client_id}>'
+        return f"<OAuthClient {self._client_id}>"
 
 
 @dataclass
@@ -648,6 +700,7 @@ class ClientCredentials(Refreshable):
     the background job uses the Client ID and Client Secret to obtain
     an Access Token from the Authorization Server.
     """
+
     client_id: str
     client_secret: str
     token_url: str
@@ -666,24 +719,28 @@ def refresh(self) -> Token:
         if self.endpoint_params:
             for k, v in self.endpoint_params.items():
                 params[k] = v
-        return retrieve_token(self.client_id,
-                              self.client_secret,
-                              self.token_url,
-                              params,
-                              use_params=self.use_params,
-                              use_header=self.use_header)
+        return retrieve_token(
+            self.client_id,
+            self.client_secret,
+            self.token_url,
+            params,
+            use_params=self.use_params,
+            use_header=self.use_header,
+        )
 
 
 class TokenCache:
     BASE_PATH = "~/.config/databricks-sdk-py/oauth"
 
-    def __init__(self,
-                 host: str,
-                 oidc_endpoints: OidcEndpoints,
-                 client_id: str,
-                 redirect_url: str = None,
-                 client_secret: str = None,
-                 scopes: List[str] = None) -> None:
+    def __init__(
+        self,
+        host: str,
+        oidc_endpoints: OidcEndpoints,
+        client_id: str,
+        redirect_url: str = None,
+        client_secret: str = None,
+        scopes: List[str] = None,
+    ) -> None:
         self._host = host
         self._client_id = client_id
         self._oidc_endpoints = oidc_endpoints
@@ -695,8 +752,12 @@ def __init__(self,
     def filename(self) -> str:
         # Include host, client_id, and scopes in the cache filename to make it unique.
         hash = hashlib.sha256()
-        for chunk in [self._host, self._client_id, ",".join(self._scopes), ]:
-            hash.update(chunk.encode('utf-8'))
+        for chunk in [
+            self._host,
+            self._client_id,
+            ",".join(self._scopes),
+        ]:
+            hash.update(chunk.encode("utf-8"))
         return os.path.expanduser(os.path.join(self.__class__.BASE_PATH, hash.hexdigest() + ".json"))
 
     def load(self) -> Optional[SessionCredentials]:
@@ -707,13 +768,15 @@ def load(self) -> Optional[SessionCredentials]:
             return None
 
         try:
-            with open(self.filename, 'r') as f:
+            with open(self.filename, "r") as f:
                 raw = json.load(f)
-                return SessionCredentials.from_dict(raw,
-                                                    token_endpoint=self._oidc_endpoints.token_endpoint,
-                                                    client_id=self._client_id,
-                                                    client_secret=self._client_secret,
-                                                    redirect_url=self._redirect_url)
+                return SessionCredentials.from_dict(
+                    raw,
+                    token_endpoint=self._oidc_endpoints.token_endpoint,
+                    client_id=self._client_id,
+                    client_secret=self._client_secret,
+                    redirect_url=self._redirect_url,
+                )
         except Exception:
             return None
 
@@ -722,6 +785,6 @@ def save(self, credentials: SessionCredentials) -> None:
         Save credentials to cache file.
         """
         os.makedirs(os.path.dirname(self.filename), exist_ok=True)
-        with open(self.filename, 'w') as f:
+        with open(self.filename, "w") as f:
             json.dump(credentials.as_dict(), f)
         os.chmod(self.filename, 0o600)
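
The reformatted classes above make up the user-to-machine OAuth flow: OAuthClient builds the
authorization URL, Consent drives the PKCE exchange, SessionCredentials refreshes the resulting
token, and TokenCache persists it under ~/.config/databricks-sdk-py/oauth. A minimal sketch of that
flow follows; the host, client ID, and redirect URL are placeholders, and the snippet assumes a
workspace with OAuth enabled.

    from databricks.sdk.oauth import OAuthClient

    # Resolve the workspace OIDC endpoints and prepare a PKCE client.
    oauth_client = OAuthClient.from_host(
        host="https://example.cloud.databricks.com",   # placeholder workspace URL
        client_id="my-oauth-app",                      # placeholder application client ID
        redirect_url="http://localhost:8020",          # must resolve to localhost/127.0.0.1
    )

    # Open the system browser, wait for the redirect, and exchange the code for a token.
    consent = oauth_client.initiate_consent()
    credentials = consent.launch_external_browser()

    # SessionCredentials implements the CredentialsProvider protocol: calling it yields a
    # header factory, and token() transparently refreshes via the refresh_token grant.
    headers = credentials()()
    assert headers["Authorization"].startswith("Bearer ")
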
diff --git a/databricks/sdk/retries.py b/databricks/sdk/retries.py
index 4f55087ea..e4408929d 100644
--- a/databricks/sdk/retries.py
+++ b/databricks/sdk/retries.py
@@ -9,16 +9,18 @@
 logger = logging.getLogger(__name__)
 
 
-def retried(*,
-            on: Sequence[Type[BaseException]] = None,
-            is_retryable: Callable[[BaseException], Optional[str]] = None,
-            timeout=timedelta(minutes=20),
-            clock: Clock = None,
-            before_retry: Callable = None):
+def retried(
+    *,
+    on: Sequence[Type[BaseException]] = None,
+    is_retryable: Callable[[BaseException], Optional[str]] = None,
+    timeout=timedelta(minutes=20),
+    clock: Clock = None,
+    before_retry: Callable = None,
+):
     has_allowlist = on is not None
     has_callback = is_retryable is not None
     if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
-        raise SyntaxError('either on=[Exception] or callback=lambda x: .. is required')
+        raise SyntaxError("either on=[Exception] or callback=lambda x: .. is required")
     if clock is None:
         clock = RealClock()
 
@@ -37,30 +39,30 @@ def wrapper(*args, **kwargs):
                     retry_reason = None
                     # sleep 10s max per attempt, unless it's HTTP 429 or 503
                     sleep = min(10, attempt)
-                    retry_after_secs = getattr(err, 'retry_after_secs', None)
+                    retry_after_secs = getattr(err, "retry_after_secs", None)
                     if retry_after_secs is not None:
                         # cannot depend on DatabricksError directly because of circular dependency
                         sleep = retry_after_secs
-                        retry_reason = 'throttled by platform'
+                        retry_reason = "throttled by platform"
                     elif is_retryable is not None:
                         retry_reason = is_retryable(err)
                     elif on is not None:
                         for err_type in on:
                             if not isinstance(err, err_type):
                                 continue
-                            retry_reason = f'{type(err).__name__} is allowed to retry'
+                            retry_reason = f"{type(err).__name__} is allowed to retry"
 
                     if retry_reason is None:
                         # raise if exception is not retryable
                         raise err
 
-                    logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
+                    logger.debug(f"Retrying: {retry_reason} (sleeping ~{sleep}s)")
                     if before_retry:
                         before_retry()
 
                     clock.sleep(sleep + random())
                     attempt += 1
-            raise TimeoutError(f'Timed out after {timeout}') from last_err
+            raise TimeoutError(f"Timed out after {timeout}") from last_err
 
         return wrapper
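
The retried decorator above requires exactly one of on= (an exception allowlist) or is_retryable=
(a callback returning a retry reason), sleeps up to ~10s per attempt unless the error carries
retry_after_secs, and raises TimeoutError once the budget is exhausted. A small usage sketch;
flaky_call is a hypothetical stand-in for a transient operation.

    from datetime import timedelta

    from databricks.sdk.retries import retried

    attempts = {"n": 0}

    @retried(on=[ConnectionError], timeout=timedelta(seconds=30))
    def flaky_call() -> str:
        # Fails twice, then succeeds; each ConnectionError is retried with a jittered sleep.
        attempts["n"] += 1
        if attempts["n"] < 3:
            raise ConnectionError("transient failure")
        return "ok"

    print(flaky_call(), "after", attempts["n"], "attempts")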
 
diff --git a/databricks/sdk/runtime/__init__.py b/databricks/sdk/runtime/__init__.py
index 9230c7a83..c4bfd042b 100644
--- a/databricks/sdk/runtime/__init__.py
+++ b/databricks/sdk/runtime/__init__.py
@@ -3,14 +3,23 @@
 import logging
 from typing import Dict, Optional, Union, cast
 
-logger = logging.getLogger('databricks.sdk')
+logger = logging.getLogger("databricks.sdk")
 is_local_implementation = True
 
 # All objects that are injected into the Notebook's user namespace should also be made
 # available to be imported from databricks.sdk.runtime.globals. This import can be used
 # in Python modules so users can access these objects from Files more easily.
 dbruntime_objects = [
-    "display", "displayHTML", "dbutils", "table", "sql", "udf", "getArgument", "sc", "sqlContext", "spark"
+    "display",
+    "displayHTML",
+    "dbutils",
+    "table",
+    "sql",
+    "udf",
+    "getArgument",
+    "sc",
+    "sqlContext",
+    "spark",
 ]
 
 # DO NOT MOVE THE TRY-CATCH BLOCK BELOW AND DO NOT ADD THINGS BEFORE IT! WILL MAKE TEST FAIL.
@@ -18,7 +27,8 @@
     # We don't want to expose additional entity to user namespace, so
     # a workaround here for exposing required information in notebook environment
     from dbruntime.sdk_credential_provider import init_runtime_native_auth
-    logger.debug('runtime SDK credential provider available')
+
+    logger.debug("runtime SDK credential provider available")
     dbruntime_objects.append("init_runtime_native_auth")
 except ImportError:
     init_runtime_native_auth = None
@@ -29,18 +39,19 @@
 def init_runtime_repl_auth():
     try:
         from dbruntime.databricks_repl_context import get_context
+
         ctx = get_context()
         if ctx is None:
-            logger.debug('Empty REPL context returned, skipping runtime auth')
+            logger.debug("Empty REPL context returned, skipping runtime auth")
             return None, None
         if ctx.workspaceUrl is None:
-            logger.debug('Workspace URL is not available, skipping runtime auth')
+            logger.debug("Workspace URL is not available, skipping runtime auth")
             return None, None
-        host = f'https://{ctx.workspaceUrl}'
+        host = f"https://{ctx.workspaceUrl}"
 
         def inner() -> Dict[str, str]:
             ctx = get_context()
-            return {'Authorization': f'Bearer {ctx.apiToken}'}
+            return {"Authorization": f"Bearer {ctx.apiToken}"}
 
         return host, inner
     except ImportError:
@@ -50,11 +61,12 @@ def inner() -> Dict[str, str]:
 def init_runtime_legacy_auth():
     try:
         import IPython
+
         ip_shell = IPython.get_ipython()
         if ip_shell is None:
             return None, None
         global_ns = ip_shell.ns_table["user_global"]
-        if 'dbutils' not in global_ns:
+        if "dbutils" not in global_ns:
             return None, None
         dbutils = global_ns["dbutils"].notebook.entry_point.getDbutils()
         if dbutils is None:
@@ -62,11 +74,11 @@ def init_runtime_legacy_auth():
         ctx = dbutils.notebook().getContext()
         if ctx is None:
             return None, None
-        host = getattr(ctx, 'apiUrl')().get()
+        host = getattr(ctx, "apiUrl")().get()
 
         def inner() -> Dict[str, str]:
             ctx = dbutils.notebook().getContext()
-            return {'Authorization': f'Bearer {getattr(ctx, "apiToken")().get()}'}
+            return {"Authorization": f'Bearer {getattr(ctx, "apiToken")().get()}'}
 
         return host, inner
     except ImportError:
@@ -97,7 +109,8 @@ def inner() -> Dict[str, str]:
     try:
         # We expect this to fail and only do this for providing types
         from pyspark.sql.context import SQLContext
-        sqlContext: SQLContext = None # type: ignore
+
+        sqlContext: SQLContext = None  # type: ignore
         table = sqlContext.table
     except Exception as e:
         logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
@@ -109,8 +122,9 @@ def inner() -> Dict[str, str]:
 
     try:
         from databricks.connect import DatabricksSession  # type: ignore
+
         spark = DatabricksSession.builder.getOrCreate()
-        sql = spark.sql # type: ignore
+        sql = spark.sql  # type: ignore
     except Exception as e:
         # We are ignoring all failures here because user might want to initialize
         # spark session themselves and we don't want to interfere with that
@@ -122,7 +136,7 @@ def inner() -> Dict[str, str]:
     except Exception as e:
         logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
 
-    def display(input=None, *args, **kwargs) -> None: # type: ignore
+    def display(input=None, *args, **kwargs) -> None:  # type: ignore
         """
         Display plots or data.
         Display plot:
@@ -144,9 +158,10 @@ def display(input=None, *args, **kwargs) -> None: # type: ignore
         """
         # Import inside the function so that imports are only triggered on usage.
         from IPython import display as IPDisplay
-        return IPDisplay.display(input, *args, **kwargs) # type: ignore
 
-    def displayHTML(html) -> None: # type: ignore
+        return IPDisplay.display(input, *args, **kwargs)  # type: ignore
+
+    def displayHTML(html) -> None:  # type: ignore
         """
         Display HTML data.
         Parameters
@@ -160,13 +175,15 @@ def displayHTML(html) -> None: # type: ignore
         """
         # Import inside the function so that imports are only triggered on usage.
         from IPython import display as IPDisplay
-        return IPDisplay.display_html(html, raw=True) # type: ignore
+
+        return IPDisplay.display_html(html, raw=True)  # type: ignore
 
     # We want to propagate the error in initialising dbutils because this is a core
     # functionality of the sdk
     from databricks.sdk.dbutils import RemoteDbUtils
 
     from . import dbutils_stub
+
     dbutils_type = Union[dbutils_stub.dbutils, RemoteDbUtils]
 
     dbutils = RemoteDbUtils()
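
The block above mirrors the notebook environment locally: the names in dbruntime_objects (dbutils,
spark, display, and friends) become importable from databricks.sdk.runtime, so code in workspace
Files does not need them passed in explicitly. A short sketch, assuming it runs inside a Databricks
runtime or with Databricks Connect configured; the path is a placeholder.

    from databricks.sdk.runtime import dbutils, display, spark

    # dbutils resolves to RemoteDbUtils locally and to the native implementation on a cluster.
    listing = dbutils.fs.ls("/Volumes")    # placeholder path
    display(spark.createDataFrame([(f.path, f.size) for f in listing], ["path", "size"]))
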
diff --git a/databricks/sdk/runtime/dbutils_stub.py b/databricks/sdk/runtime/dbutils_stub.py
index 5a1882be8..363436e1f 100644
--- a/databricks/sdk/runtime/dbutils_stub.py
+++ b/databricks/sdk/runtime/dbutils_stub.py
@@ -2,21 +2,21 @@
 from collections import namedtuple
 
 
-class FileInfo(namedtuple('FileInfo', ['path', 'name', 'size', "modificationTime"])):
+class FileInfo(namedtuple("FileInfo", ["path", "name", "size", "modificationTime"])):
     pass
 
 
-class MountInfo(namedtuple('MountInfo', ['mountPoint', 'source', 'encryptionType'])):
+class MountInfo(namedtuple("MountInfo", ["mountPoint", "source", "encryptionType"])):
     pass
 
 
-class SecretScope(namedtuple('SecretScope', ['name'])):
+class SecretScope(namedtuple("SecretScope", ["name"])):
 
     def getName(self):
         return self.name
 
 
-class SecretMetadata(namedtuple('SecretMetadata', ['key'])):
+class SecretMetadata(namedtuple("SecretMetadata", ["key"])):
     pass
 
 
@@ -49,8 +49,7 @@ def showRoles() -> typing.List[str]:
             ...
 
         @staticmethod
-        def getCurrentCredentials() -> typing.Mapping[str, str]:
-            ...
+        def getCurrentCredentials() -> typing.Mapping[str, str]: ...
 
     class data:
         """
@@ -129,40 +128,38 @@ def rm(dir: str, recurse: bool = False) -> bool:
             ...
 
         @staticmethod
-        def cacheFiles(*files):
-            ...
+        def cacheFiles(*files): ...
 
         @staticmethod
-        def cacheTable(name: str):
-            ...
+        def cacheTable(name: str): ...
 
         @staticmethod
-        def uncacheFiles(*files):
-            ...
+        def uncacheFiles(*files): ...
 
         @staticmethod
-        def uncacheTable(name: str):
-            ...
+        def uncacheTable(name: str): ...
 
         @staticmethod
-        def mount(source: str,
-                  mount_point: str,
-                  encryption_type: str = "",
-                  owner: typing.Optional[str] = None,
-                  extra_configs: typing.Mapping[str, str] = {},
-                  ) -> bool:
+        def mount(
+            source: str,
+            mount_point: str,
+            encryption_type: str = "",
+            owner: typing.Optional[str] = None,
+            extra_configs: typing.Mapping[str, str] = {},
+        ) -> bool:
             """
             Mounts the given source directory into DBFS at the given mount point
             """
             ...
 
         @staticmethod
-        def updateMount(source: str,
-                        mount_point: str,
-                        encryption_type: str = "",
-                        owner: typing.Optional[str] = None,
-                        extra_configs: typing.Mapping[str, str] = {},
-                        ) -> bool:
+        def updateMount(
+            source: str,
+            mount_point: str,
+            encryption_type: str = "",
+            owner: typing.Optional[str] = None,
+            extra_configs: typing.Mapping[str, str] = {},
+        ) -> bool:
             """
             Similar to mount(), but updates an existing mount point (if present) instead of creating a new one
             """
@@ -200,7 +197,12 @@ class taskValues:
             """
 
             @staticmethod
-            def get(taskKey: str, key: str, default: any = None, debugValue: any = None) -> None:
+            def get(
+                taskKey: str,
+                key: str,
+                default: any = None,
+                debugValue: any = None,
+            ) -> None:
                 """
                 Returns the latest task value that belongs to the current job run
                 """
@@ -238,7 +240,11 @@ def exit(value: str) -> None:
             ...
 
         @staticmethod
-        def run(path: str, timeout_seconds: int, arguments: typing.Mapping[str, str]) -> str:
+        def run(
+            path: str,
+            timeout_seconds: int,
+            arguments: typing.Mapping[str, str],
+        ) -> str:
             """
             This method runs a notebook and returns its exit value
             """
@@ -307,7 +313,12 @@ def text(name: str, defaultValue: str, label: str = None):
             ...
 
         @staticmethod
-        def dropdown(name: str, defaultValue: str, choices: typing.List[str], label: str = None):
+        def dropdown(
+            name: str,
+            defaultValue: str,
+            choices: typing.List[str],
+            label: str = None,
+        ):
             """Creates a dropdown input widget with given specification.
             :param name: Name of argument associated with the new input widget
             :param defaultValue: Default value of the input widget (must be one of choices)
@@ -317,11 +328,12 @@ def dropdown(name: str, defaultValue: str, choices: typing.List[str], label: str
             ...
 
         @staticmethod
-        def combobox(name: str,
-                     defaultValue: str,
-                     choices: typing.List[str],
-                     label: typing.Optional[str] = None,
-                     ):
+        def combobox(
+            name: str,
+            defaultValue: str,
+            choices: typing.List[str],
+            label: typing.Optional[str] = None,
+        ):
             """Creates a combobox input widget with given specification.
             :param name: Name of argument associated with the new input widget
             :param defaultValue: Default value of the input widget
@@ -331,11 +343,12 @@ def combobox(name: str,
             ...
 
         @staticmethod
-        def multiselect(name: str,
-                        defaultValue: str,
-                        choices: typing.List[str],
-                        label: typing.Optional[str] = None,
-                        ):
+        def multiselect(
+            name: str,
+            defaultValue: str,
+            choices: typing.List[str],
+            label: typing.Optional[str] = None,
+        ):
             """Creates a multiselect input widget with given specification.
             :param name: Name of argument associated with the new input widget
             :param defaultValue: Default value of the input widget (must be one of choices)
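
The stub above only supplies type hints for the runtime dbutils object; on a cluster the real
implementation backs these signatures. A small widgets sketch matching the dropdown signature shown
(illustrative only; the widget name and choices are made up):

    from databricks.sdk.runtime import dbutils

    dbutils.widgets.dropdown("env", "dev", ["dev", "staging", "prod"], "Environment")
    env = dbutils.widgets.get("env")    # "dev" until the user picks another value
    print(f"deploying to {env}")
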
diff --git a/databricks/sdk/service/_internal.py b/databricks/sdk/service/_internal.py
index 063837942..1e501e0e0 100644
--- a/databricks/sdk/service/_internal.py
+++ b/databricks/sdk/service/_internal.py
@@ -6,18 +6,21 @@
 def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:
     if field not in d or d[field] is None:
         return None
-    return getattr(cls, 'from_dict')(d[field])
+    return getattr(cls, "from_dict")(d[field])
 
 
 def _repeated_dict(d: Dict[str, any], field: str, cls: Type) -> any:
     if field not in d or not d[field]:
         return []
-    from_dict = getattr(cls, 'from_dict')
+    from_dict = getattr(cls, "from_dict")
     return [from_dict(v) for v in d[field]]
 
 
 def _get_enum_value(cls: Type, value: str) -> Optional[Type]:
-    return next((v for v in getattr(cls, '__members__').values() if v.value == value), None)
+    return next(
+        (v for v in getattr(cls, "__members__").values() if v.value == value),
+        None,
+    )
 
 
 def _enum(d: Dict[str, any], field: str, cls: Type) -> any:
@@ -43,7 +46,7 @@ def _escape_multi_segment_path_parameter(param: str) -> str:
     return urllib.parse.quote(param)
 
 
-ReturnType = TypeVar('ReturnType')
+ReturnType = TypeVar("ReturnType")
 
 
 class Wait(Generic[ReturnType]):
@@ -60,8 +63,10 @@ def __getattr__(self, key) -> any:
     def bind(self) -> dict:
         return self._bind
 
-    def result(self,
-               timeout: datetime.timedelta = datetime.timedelta(minutes=20),
-               callback: Callable[[ReturnType], None] = None) -> ReturnType:
+    def result(
+        self,
+        timeout: datetime.timedelta = datetime.timedelta(minutes=20),
+        callback: Callable[[ReturnType], None] = None,
+    ) -> ReturnType:
         kwargs = self._bind.copy()
         return self._waiter(callback=callback, timeout=timeout, **kwargs)
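
Wait is the return type of long-running operations in the generated services: attribute access is
proxied to the bound fields, and result() polls until a terminal state or the timeout. A sketch of
how it is typically consumed (the cluster values are placeholders and authentication is assumed to
be configured):

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    waiter = w.clusters.create(
        cluster_name="sdk-format-check",                                   # placeholder name
        spark_version=w.clusters.select_spark_version(long_term_support=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        num_workers=1,
    )
    print("cluster id:", waiter.cluster_id)    # bound parameters are readable immediately

    cluster = waiter.result(
        timeout=timedelta(minutes=25),
        callback=lambda details: print("state:", details.state),   # invoked while polling
    )
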
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index d15a6bef2..a6eea8bf5 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -73,73 +73,103 @@ class App:
     def as_dict(self) -> dict:
         """Serializes the App into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict()
-        if self.app_status: body['app_status'] = self.app_status.as_dict()
-        if self.compute_status: body['compute_status'] = self.compute_status.as_dict()
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
+        if self.active_deployment:
+            body["active_deployment"] = self.active_deployment.as_dict()
+        if self.app_status:
+            body["app_status"] = self.app_status.as_dict()
+        if self.compute_status:
+            body["compute_status"] = self.compute_status.as_dict()
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.creator is not None:
+            body["creator"] = self.creator
         if self.default_source_code_path is not None:
-            body['default_source_code_path'] = self.default_source_code_path
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
-        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
+            body["default_source_code_path"] = self.default_source_code_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pending_deployment:
+            body["pending_deployment"] = self.pending_deployment.as_dict()
+        if self.resources:
+            body["resources"] = [v.as_dict() for v in self.resources]
         if self.service_principal_client_id is not None:
-            body['service_principal_client_id'] = self.service_principal_client_id
-        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
+            body["service_principal_client_id"] = self.service_principal_client_id
+        if self.service_principal_id is not None:
+            body["service_principal_id"] = self.service_principal_id
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.updater is not None: body['updater'] = self.updater
-        if self.url is not None: body['url'] = self.url
+            body["service_principal_name"] = self.service_principal_name
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.updater is not None:
+            body["updater"] = self.updater
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the App into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.active_deployment: body['active_deployment'] = self.active_deployment
-        if self.app_status: body['app_status'] = self.app_status
-        if self.compute_status: body['compute_status'] = self.compute_status
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
+        if self.active_deployment:
+            body["active_deployment"] = self.active_deployment
+        if self.app_status:
+            body["app_status"] = self.app_status
+        if self.compute_status:
+            body["compute_status"] = self.compute_status
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.creator is not None:
+            body["creator"] = self.creator
         if self.default_source_code_path is not None:
-            body['default_source_code_path'] = self.default_source_code_path
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment
-        if self.resources: body['resources'] = self.resources
+            body["default_source_code_path"] = self.default_source_code_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pending_deployment:
+            body["pending_deployment"] = self.pending_deployment
+        if self.resources:
+            body["resources"] = self.resources
         if self.service_principal_client_id is not None:
-            body['service_principal_client_id'] = self.service_principal_client_id
-        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
+            body["service_principal_client_id"] = self.service_principal_client_id
+        if self.service_principal_id is not None:
+            body["service_principal_id"] = self.service_principal_id
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.updater is not None: body['updater'] = self.updater
-        if self.url is not None: body['url'] = self.url
+            body["service_principal_name"] = self.service_principal_name
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.updater is not None:
+            body["updater"] = self.updater
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> App:
         """Deserializes the App from a dictionary."""
-        return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
-                   app_status=_from_dict(d, 'app_status', ApplicationStatus),
-                   compute_status=_from_dict(d, 'compute_status', ComputeStatus),
-                   create_time=d.get('create_time', None),
-                   creator=d.get('creator', None),
-                   default_source_code_path=d.get('default_source_code_path', None),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   name=d.get('name', None),
-                   pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
-                   resources=_repeated_dict(d, 'resources', AppResource),
-                   service_principal_client_id=d.get('service_principal_client_id', None),
-                   service_principal_id=d.get('service_principal_id', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   update_time=d.get('update_time', None),
-                   updater=d.get('updater', None),
-                   url=d.get('url', None))
+        return cls(
+            active_deployment=_from_dict(d, "active_deployment", AppDeployment),
+            app_status=_from_dict(d, "app_status", ApplicationStatus),
+            compute_status=_from_dict(d, "compute_status", ComputeStatus),
+            create_time=d.get("create_time", None),
+            creator=d.get("creator", None),
+            default_source_code_path=d.get("default_source_code_path", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            pending_deployment=_from_dict(d, "pending_deployment", AppDeployment),
+            resources=_repeated_dict(d, "resources", AppResource),
+            service_principal_client_id=d.get("service_principal_client_id", None),
+            service_principal_id=d.get("service_principal_id", None),
+            service_principal_name=d.get("service_principal_name", None),
+            update_time=d.get("update_time", None),
+            updater=d.get("updater", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
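
Every dataclass in this module follows the same pattern as App above: as_dict() produces the
JSON-ready request body (dropping unset fields), as_shallow_dict() keeps nested objects as-is, and
from_dict() rebuilds the dataclass from a response payload. A quick round-trip sketch with
placeholder values:

    from databricks.sdk.service.apps import App

    app = App(name="hello-world", description="formatting smoke test")
    payload = app.as_dict()                      # {'name': 'hello-world', 'description': ...}
    assert App.from_dict(payload).name == "hello-world"
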
@@ -159,30 +189,38 @@ class AppAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the AppAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
         """Deserializes the AppAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', AppPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", AppPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -205,33 +243,43 @@ class AppAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the AppAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
         """Deserializes the AppAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", AppPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -267,40 +315,58 @@ class AppDeployment:
     def as_dict(self) -> dict:
         """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
-        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict()
-        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
-        if self.mode is not None: body['mode'] = self.mode.value
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        if self.status: body['status'] = self.status.as_dict()
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.deployment_artifacts:
+            body["deployment_artifacts"] = self.deployment_artifacts.as_dict()
+        if self.deployment_id is not None:
+            body["deployment_id"] = self.deployment_id
+        if self.mode is not None:
+            body["mode"] = self.mode.value
+        if self.source_code_path is not None:
+            body["source_code_path"] = self.source_code_path
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppDeployment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.creator is not None: body['creator'] = self.creator
-        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts
-        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
-        if self.mode is not None: body['mode'] = self.mode
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        if self.status: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.deployment_artifacts:
+            body["deployment_artifacts"] = self.deployment_artifacts
+        if self.deployment_id is not None:
+            body["deployment_id"] = self.deployment_id
+        if self.mode is not None:
+            body["mode"] = self.mode
+        if self.source_code_path is not None:
+            body["source_code_path"] = self.source_code_path
+        if self.status:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
         """Deserializes the AppDeployment from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   creator=d.get('creator', None),
-                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
-                   deployment_id=d.get('deployment_id', None),
-                   mode=_enum(d, 'mode', AppDeploymentMode),
-                   source_code_path=d.get('source_code_path', None),
-                   status=_from_dict(d, 'status', AppDeploymentStatus),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            creator=d.get("creator", None),
+            deployment_artifacts=_from_dict(d, "deployment_artifacts", AppDeploymentArtifacts),
+            deployment_id=d.get("deployment_id", None),
+            mode=_enum(d, "mode", AppDeploymentMode),
+            source_code_path=d.get("source_code_path", None),
+            status=_from_dict(d, "status", AppDeploymentStatus),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -311,33 +377,35 @@ class AppDeploymentArtifacts:
     def as_dict(self) -> dict:
         """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.source_code_path is not None:
+            body["source_code_path"] = self.source_code_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.source_code_path is not None:
+            body["source_code_path"] = self.source_code_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
         """Deserializes the AppDeploymentArtifacts from a dictionary."""
-        return cls(source_code_path=d.get('source_code_path', None))
+        return cls(source_code_path=d.get("source_code_path", None))
 
 
 class AppDeploymentMode(Enum):
 
-    AUTO_SYNC = 'AUTO_SYNC'
-    SNAPSHOT = 'SNAPSHOT'
+    AUTO_SYNC = "AUTO_SYNC"
+    SNAPSHOT = "SNAPSHOT"
 
 
 class AppDeploymentState(Enum):
 
-    CANCELLED = 'CANCELLED'
-    FAILED = 'FAILED'
-    IN_PROGRESS = 'IN_PROGRESS'
-    SUCCEEDED = 'SUCCEEDED'
+    CANCELLED = "CANCELLED"
+    FAILED = "FAILED"
+    IN_PROGRESS = "IN_PROGRESS"
+    SUCCEEDED = "SUCCEEDED"
 
 
 @dataclass
@@ -351,21 +419,28 @@ class AppDeploymentStatus:
     def as_dict(self) -> dict:
         """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
         """Deserializes the AppDeploymentStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState))
+        return cls(
+            message=d.get("message", None),
+            state=_enum(d, "state", AppDeploymentState),
+        )
 
 
 @dataclass
@@ -380,32 +455,40 @@ class AppPermission:
     def as_dict(self) -> dict:
         """Serializes the AppPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermission:
         """Deserializes the AppPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", AppPermissionLevel),
+        )
 
 
 class AppPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_USE = 'CAN_USE'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_USE = "CAN_USE"
 
 
 @dataclass
@@ -420,25 +503,32 @@ def as_dict(self) -> dict:
         """Serializes the AppPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissions:
         """Deserializes the AppPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -451,22 +541,28 @@ class AppPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription:
         """Deserializes the AppPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', AppPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", AppPermissionLevel),
+        )
 
 
 @dataclass
@@ -480,22 +576,27 @@ def as_dict(self) -> dict:
         """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.app_name is not None: body['app_name'] = self.app_name
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.app_name is not None:
+            body["app_name"] = self.app_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.app_name is not None: body['app_name'] = self.app_name
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.app_name is not None:
+            body["app_name"] = self.app_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
         """Deserializes the AppPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest),
-                   app_name=d.get('app_name', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlRequest),
+            app_name=d.get("app_name", None),
+        )
 
 
 @dataclass
@@ -517,34 +618,48 @@ class AppResource:
     def as_dict(self) -> dict:
         """Serializes the AppResource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.job: body['job'] = self.job.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.secret: body['secret'] = self.secret.as_dict()
-        if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint.as_dict()
-        if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict()
+        if self.description is not None:
+            body["description"] = self.description
+        if self.job:
+            body["job"] = self.job.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.secret:
+            body["secret"] = self.secret.as_dict()
+        if self.serving_endpoint:
+            body["serving_endpoint"] = self.serving_endpoint.as_dict()
+        if self.sql_warehouse:
+            body["sql_warehouse"] = self.sql_warehouse.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppResource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.job: body['job'] = self.job
-        if self.name is not None: body['name'] = self.name
-        if self.secret: body['secret'] = self.secret
-        if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint
-        if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse
+        if self.description is not None:
+            body["description"] = self.description
+        if self.job:
+            body["job"] = self.job
+        if self.name is not None:
+            body["name"] = self.name
+        if self.secret:
+            body["secret"] = self.secret
+        if self.serving_endpoint:
+            body["serving_endpoint"] = self.serving_endpoint
+        if self.sql_warehouse:
+            body["sql_warehouse"] = self.sql_warehouse
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResource:
         """Deserializes the AppResource from a dictionary."""
-        return cls(description=d.get('description', None),
-                   job=_from_dict(d, 'job', AppResourceJob),
-                   name=d.get('name', None),
-                   secret=_from_dict(d, 'secret', AppResourceSecret),
-                   serving_endpoint=_from_dict(d, 'serving_endpoint', AppResourceServingEndpoint),
-                   sql_warehouse=_from_dict(d, 'sql_warehouse', AppResourceSqlWarehouse))
+        return cls(
+            description=d.get("description", None),
+            job=_from_dict(d, "job", AppResourceJob),
+            name=d.get("name", None),
+            secret=_from_dict(d, "secret", AppResourceSecret),
+            serving_endpoint=_from_dict(d, "serving_endpoint", AppResourceServingEndpoint),
+            sql_warehouse=_from_dict(d, "sql_warehouse", AppResourceSqlWarehouse),
+        )
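
For orientation, a minimal sketch of the as_dict/from_dict round trip that the reformatted AppResource helpers above implement; the resource name, scope, and key are placeholder values, and only fields visible in this hunk are used.

from databricks.sdk.service.apps import (
    AppResource,
    AppResourceSecret,
    AppResourceSecretSecretPermission,
)

# A resource granting the app read access to one secret in a scope.
resource = AppResource(
    name="api-token",
    description="Read-only token for the backing service",
    secret=AppResourceSecret(
        scope="my-scope",
        key="token",
        permission=AppResourceSecretSecretPermission.READ,
    ),
)

# as_dict() nests the secret through its own as_dict(); from_dict() reverses it.
body = resource.as_dict()
assert body["secret"]["permission"] == "READ"
assert AppResource.from_dict(body) == resource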
 
 
 @dataclass
@@ -559,29 +674,36 @@ class AppResourceJob:
     def as_dict(self) -> dict:
         """Serializes the AppResourceJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.permission is not None: body['permission'] = self.permission.value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.permission is not None:
+            body["permission"] = self.permission.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.permission is not None: body['permission'] = self.permission
+        if self.id is not None:
+            body["id"] = self.id
+        if self.permission is not None:
+            body["permission"] = self.permission
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceJob:
         """Deserializes the AppResourceJob from a dictionary."""
-        return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceJobJobPermission))
+        return cls(
+            id=d.get("id", None),
+            permission=_enum(d, "permission", AppResourceJobJobPermission),
+        )
 
 
 class AppResourceJobJobPermission(Enum):
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
-    CAN_VIEW = 'CAN_VIEW'
-    IS_OWNER = 'IS_OWNER'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MANAGE_RUN = "CAN_MANAGE_RUN"
+    CAN_VIEW = "CAN_VIEW"
+    IS_OWNER = "IS_OWNER"
 
 
 @dataclass
@@ -599,33 +721,41 @@ class AppResourceSecret:
     def as_dict(self) -> dict:
         """Serializes the AppResourceSecret into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.permission is not None: body['permission'] = self.permission.value
-        if self.scope is not None: body['scope'] = self.scope
+        if self.key is not None:
+            body["key"] = self.key
+        if self.permission is not None:
+            body["permission"] = self.permission.value
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.permission is not None: body['permission'] = self.permission
-        if self.scope is not None: body['scope'] = self.scope
+        if self.key is not None:
+            body["key"] = self.key
+        if self.permission is not None:
+            body["permission"] = self.permission
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret:
         """Deserializes the AppResourceSecret from a dictionary."""
-        return cls(key=d.get('key', None),
-                   permission=_enum(d, 'permission', AppResourceSecretSecretPermission),
-                   scope=d.get('scope', None))
+        return cls(
+            key=d.get("key", None),
+            permission=_enum(d, "permission", AppResourceSecretSecretPermission),
+            scope=d.get("scope", None),
+        )
 
 
 class AppResourceSecretSecretPermission(Enum):
     """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE"."""
 
-    MANAGE = 'MANAGE'
-    READ = 'READ'
-    WRITE = 'WRITE'
+    MANAGE = "MANAGE"
+    READ = "READ"
+    WRITE = "WRITE"
 
 
 @dataclass
@@ -640,29 +770,39 @@ class AppResourceServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the AppResourceServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.permission is not None: body['permission'] = self.permission.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission is not None:
+            body["permission"] = self.permission.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.permission is not None: body['permission'] = self.permission
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission is not None:
+            body["permission"] = self.permission
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint:
         """Deserializes the AppResourceServingEndpoint from a dictionary."""
-        return cls(name=d.get('name', None),
-                   permission=_enum(d, 'permission', AppResourceServingEndpointServingEndpointPermission))
+        return cls(
+            name=d.get("name", None),
+            permission=_enum(
+                d,
+                "permission",
+                AppResourceServingEndpointServingEndpointPermission,
+            ),
+        )
 
 
 class AppResourceServingEndpointServingEndpointPermission(Enum):
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_QUERY = 'CAN_QUERY'
-    CAN_VIEW = 'CAN_VIEW'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_QUERY = "CAN_QUERY"
+    CAN_VIEW = "CAN_VIEW"
 
 
 @dataclass
@@ -677,37 +817,43 @@ class AppResourceSqlWarehouse:
     def as_dict(self) -> dict:
         """Serializes the AppResourceSqlWarehouse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.permission is not None: body['permission'] = self.permission.value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.permission is not None:
+            body["permission"] = self.permission.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.permission is not None: body['permission'] = self.permission
+        if self.id is not None:
+            body["id"] = self.id
+        if self.permission is not None:
+            body["permission"] = self.permission
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse:
         """Deserializes the AppResourceSqlWarehouse from a dictionary."""
-        return cls(id=d.get('id', None),
-                   permission=_enum(d, 'permission', AppResourceSqlWarehouseSqlWarehousePermission))
+        return cls(
+            id=d.get("id", None),
+            permission=_enum(d, "permission", AppResourceSqlWarehouseSqlWarehousePermission),
+        )
 
 
 class AppResourceSqlWarehouseSqlWarehousePermission(Enum):
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_USE = 'CAN_USE'
-    IS_OWNER = 'IS_OWNER'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_USE = "CAN_USE"
+    IS_OWNER = "IS_OWNER"
 
 
 class ApplicationState(Enum):
 
-    CRASHED = 'CRASHED'
-    DEPLOYING = 'DEPLOYING'
-    RUNNING = 'RUNNING'
-    UNAVAILABLE = 'UNAVAILABLE'
+    CRASHED = "CRASHED"
+    DEPLOYING = "DEPLOYING"
+    RUNNING = "RUNNING"
+    UNAVAILABLE = "UNAVAILABLE"
 
 
 @dataclass
@@ -721,32 +867,39 @@ class ApplicationStatus:
     def as_dict(self) -> dict:
         """Serializes the ApplicationStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus:
         """Deserializes the ApplicationStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', ApplicationState))
+        return cls(
+            message=d.get("message", None),
+            state=_enum(d, "state", ApplicationState),
+        )
 
 
 class ComputeState(Enum):
 
-    ACTIVE = 'ACTIVE'
-    DELETING = 'DELETING'
-    ERROR = 'ERROR'
-    STARTING = 'STARTING'
-    STOPPED = 'STOPPED'
-    STOPPING = 'STOPPING'
-    UPDATING = 'UPDATING'
+    ACTIVE = "ACTIVE"
+    DELETING = "DELETING"
+    ERROR = "ERROR"
+    STARTING = "STARTING"
+    STOPPED = "STOPPED"
+    STOPPING = "STOPPING"
+    UPDATING = "UPDATING"
 
 
 @dataclass
@@ -760,21 +913,28 @@ class ComputeStatus:
     def as_dict(self) -> dict:
         """Serializes the ComputeStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
         """Deserializes the ComputeStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState))
+        return cls(
+            message=d.get("message", None),
+            state=_enum(d, "state", ComputeState),
+        )
 
 
 @dataclass
@@ -785,19 +945,21 @@ class GetAppPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetAppPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse:
         """Deserializes the GetAppPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", AppPermissionsDescription))
 
 
 @dataclass
@@ -811,22 +973,28 @@ class ListAppDeploymentsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.app_deployments:
+            body["app_deployments"] = [v.as_dict() for v in self.app_deployments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.app_deployments: body['app_deployments'] = self.app_deployments
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.app_deployments:
+            body["app_deployments"] = self.app_deployments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
         """Deserializes the ListAppDeploymentsResponse from a dictionary."""
-        return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            app_deployments=_repeated_dict(d, "app_deployments", AppDeployment),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -839,21 +1007,28 @@ class ListAppsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apps: body['apps'] = self.apps
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = self.apps
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
         """Deserializes the ListAppsResponse from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None))
+        return cls(
+            apps=_repeated_dict(d, "apps", App),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -870,24 +1045,30 @@ class StopAppRequest:
 
 class AppsAPI:
     """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
-    Databricks services, and enable users to interact through single sign-on."""
+    Databricks services, and enable users to interact through single sign-on.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_app_active(self,
-                            name: str,
-                            timeout=timedelta(minutes=20),
-                            callback: Optional[Callable[[App], None]] = None) -> App:
+    def wait_get_app_active(
+        self,
+        name: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[App], None]] = None,
+    ) -> App:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ComputeState.ACTIVE, )
-        failure_states = (ComputeState.ERROR, ComputeState.STOPPED, )
-        status_message = 'polling...'
+        target_states = (ComputeState.ACTIVE,)
+        failure_states = (
+            ComputeState.ERROR,
+            ComputeState.STOPPED,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(name=name)
             status = poll.compute_status.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.compute_status:
                 status_message = poll.compute_status.message
             if status in target_states:
@@ -895,33 +1076,34 @@ def wait_get_app_active(self,
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                msg = f"failed to reach ACTIVE, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_get_deployment_app_succeeded(
-            self,
-            app_name: str,
-            deployment_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
+        self,
+        app_name: str,
+        deployment_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[AppDeployment], None]] = None,
+    ) -> AppDeployment:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (AppDeploymentState.SUCCEEDED, )
-        failure_states = (AppDeploymentState.FAILED, )
-        status_message = 'polling...'
+        target_states = (AppDeploymentState.SUCCEEDED,)
+        failure_states = (AppDeploymentState.FAILED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
             status = poll.status.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.status:
                 status_message = poll.status.message
             if status in target_states:
@@ -929,31 +1111,33 @@ def wait_get_deployment_app_succeeded(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
+                msg = f"failed to reach SUCCEEDED, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"app_name={app_name}, deployment_id={deployment_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def wait_get_app_stopped(self,
-                             name: str,
-                             timeout=timedelta(minutes=20),
-                             callback: Optional[Callable[[App], None]] = None) -> App:
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
+
+    def wait_get_app_stopped(
+        self,
+        name: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[App], None]] = None,
+    ) -> App:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ComputeState.STOPPED, )
-        failure_states = (ComputeState.ERROR, )
-        status_message = 'polling...'
+        target_states = (ComputeState.STOPPED,)
+        failure_states = (ComputeState.ERROR,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(name=name)
             status = poll.compute_status.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.compute_status:
                 status_message = poll.compute_status.message
             if status in target_states:
@@ -961,323 +1145,415 @@ def wait_get_app_stopped(self,
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach STOPPED, got {status}: {status_message}'
+                msg = f"failed to reach STOPPED, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
-        
+
         Creates a new app.
-        
+
         :param app: :class:`App` (optional)
         :param no_compute: bool (optional)
           If true, the app will not be started after creation.
-        
+
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
         body = app.as_dict()
         query = {}
-        if no_compute is not None: query['no_compute'] = no_compute
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
-        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
-
-    def create_and_wait(self,
-                        *,
-                        app: Optional[App] = None,
-                        no_compute: Optional[bool] = None,
-                        timeout=timedelta(minutes=20)) -> App:
+        if no_compute is not None:
+            query["no_compute"] = no_compute
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
+        return Wait(
+            self.wait_get_app_active,
+            response=App.from_dict(op_response),
+            name=op_response["name"],
+        )
+
+    def create_and_wait(
+        self,
+        *,
+        app: Optional[App] = None,
+        no_compute: Optional[bool] = None,
+        timeout=timedelta(minutes=20),
+    ) -> App:
         return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
-        
+
         Deletes an app.
-        
+
         :param name: str
           The name of the app.
-        
+
         :returns: :class:`App`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers)
+        res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers)
         return App.from_dict(res)
 
     def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
         """Create an app deployment.
-        
+
         Creates an app deployment for the app with the supplied name.
-        
+
         :param app_name: str
           The name of the app.
         :param app_deployment: :class:`AppDeployment` (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         """
         body = app_deployment.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/apps/{app_name}/deployments',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_deployment_app_succeeded,
-                    response=AppDeployment.from_dict(op_response),
-                    app_name=app_name,
-                    deployment_id=op_response['deployment_id'])
-
-    def deploy_and_wait(self,
-                        app_name: str,
-                        *,
-                        app_deployment: Optional[AppDeployment] = None,
-                        timeout=timedelta(minutes=20)) -> AppDeployment:
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "POST",
+            f"/api/2.0/apps/{app_name}/deployments",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_deployment_app_succeeded,
+            response=AppDeployment.from_dict(op_response),
+            app_name=app_name,
+            deployment_id=op_response["deployment_id"],
+        )
+
+    def deploy_and_wait(
+        self,
+        app_name: str,
+        *,
+        app_deployment: Optional[AppDeployment] = None,
+        timeout=timedelta(minutes=20),
+    ) -> AppDeployment:
         return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
 
     def get(self, name: str) -> App:
         """Get an app.
-        
+
         Retrieves information for the app with the supplied name.
-        
+
         :param name: str
           The name of the app.
-        
+
         :returns: :class:`App`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/apps/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/apps/{name}", headers=headers)
         return App.from_dict(res)
 
     def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment:
         """Get an app deployment.
-        
+
         Retrieves information for the app deployment with the supplied name and deployment id.
-        
+
         :param app_name: str
           The name of the app.
         :param deployment_id: str
           The unique id of the deployment.
-        
+
         :returns: :class:`AppDeployment`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments/{deployment_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/apps/{app_name}/deployments/{deployment_id}",
+            headers=headers,
+        )
         return AppDeployment.from_dict(res)
 
     def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse:
         """Get app permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param app_name: str
           The app for which to get or manage permissions.
-        
+
         :returns: :class:`GetAppPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}/permissionLevels', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/apps/{app_name}/permissionLevels",
+            headers=headers,
+        )
         return GetAppPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, app_name: str) -> AppPermissions:
         """Get app permissions.
-        
+
         Gets the permissions of an app. Apps can inherit permissions from their root object.
-        
+
         :param app_name: str
           The app for which to get or manage permissions.
-        
+
         :returns: :class:`AppPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}", headers=headers)
         return AppPermissions.from_dict(res)
 
-    def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[App]:
         """List apps.
-        
+
         Lists all apps in the workspace.
-        
+
         :param page_size: int (optional)
           Upper bound for items returned.
         :param page_token: str (optional)
           Pagination token to go to the next page of apps. Requests first page if absent.
-        
+
         :returns: Iterator over :class:`App`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/apps', query=query, headers=headers)
-            if 'apps' in json:
-                for v in json['apps']:
+            json = self._api.do("GET", "/api/2.0/apps", query=query, headers=headers)
+            if "apps" in json:
+                for v in json["apps"]:
                     yield App.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_deployments(self,
-                         app_name: str,
-                         *,
-                         page_size: Optional[int] = None,
-                         page_token: Optional[str] = None) -> Iterator[AppDeployment]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_deployments(
+        self,
+        app_name: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[AppDeployment]:
         """List app deployments.
-        
+
         Lists all app deployments for the app with the supplied name.
-        
+
         :param app_name: str
           The name of the app.
         :param page_size: int (optional)
           Upper bound for items returned.
         :param page_token: str (optional)
           Pagination token to go to the next page of app deployments. Requests first page if absent.
-        
+
         :returns: Iterator over :class:`AppDeployment`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments', query=query, headers=headers)
-            if 'app_deployments' in json:
-                for v in json['app_deployments']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/apps/{app_name}/deployments",
+                query=query,
+                headers=headers,
+            )
+            if "app_deployments" in json:
+                for v in json["app_deployments"]:
                     yield AppDeployment.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def set_permissions(
-            self,
-            app_name: str,
-            *,
-            access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
+        self,
+        app_name: str,
+        *,
+        access_control_list: Optional[List[AppAccessControlRequest]] = None,
+    ) -> AppPermissions:
         """Set app permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param app_name: str
           The app for which to get or manage permissions.
         :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-        
+
         :returns: :class:`AppPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/apps/{app_name}",
+            body=body,
+            headers=headers,
+        )
         return AppPermissions.from_dict(res)
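
A hedged sketch of calling set_permissions as reformatted above. The app and group names are placeholders, and it assumes AppAccessControlRequest (defined earlier in this module) exposes group_name and permission_level like the other *AccessControlRequest types; AppPermissionLevel.CAN_USE is one of the two levels listed above.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import AppAccessControlRequest, AppPermissionLevel

w = WorkspaceClient()

# Replace all direct grants on the app with a single CAN_USE grant for a group.
perms = w.apps.set_permissions(
    app_name="my-demo-app",
    access_control_list=[
        AppAccessControlRequest(
            group_name="data-scientists",  # assumed field name, mirroring other access-control request types
            permission_level=AppPermissionLevel.CAN_USE,
        )
    ],
)
print(perms.object_id, perms.object_type)

update_permissions() takes the same arguments but, per its docstring, updates the existing grants instead of replacing them.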
 
     def start(self, name: str) -> Wait[App]:
         """Start an app.
-        
+
         Start the last active deployment of the app in the workspace.
-        
+
         :param name: str
           The name of the app.
-        
+
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
 
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/start', headers=headers)
-        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
+        op_response = self._api.do("POST", f"/api/2.0/apps/{name}/start", headers=headers)
+        return Wait(
+            self.wait_get_app_active,
+            response=App.from_dict(op_response),
+            name=op_response["name"],
+        )
 
     def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.start(name=name).result(timeout=timeout)
 
     def stop(self, name: str) -> Wait[App]:
         """Stop an app.
-        
+
         Stops the active deployment of the app in the workspace.
-        
+
         :param name: str
           The name of the app.
-        
+
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_stopped for more details.
         """
 
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/stop', headers=headers)
-        return Wait(self.wait_get_app_stopped, response=App.from_dict(op_response), name=op_response['name'])
+        op_response = self._api.do("POST", f"/api/2.0/apps/{name}/stop", headers=headers)
+        return Wait(
+            self.wait_get_app_stopped,
+            response=App.from_dict(op_response),
+            name=op_response["name"],
+        )
 
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)
 
     def update(self, name: str, *, app: Optional[App] = None) -> App:
         """Update an app.
-        
+
         Updates the app with the supplied name.
-        
+
         :param name: str
           The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
           must be unique within the workspace.
         :param app: :class:`App` (optional)
-        
+
         :returns: :class:`App`
         """
         body = app.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
+        res = self._api.do("PATCH", f"/api/2.0/apps/{name}", body=body, headers=headers)
         return App.from_dict(res)
 
     def update_permissions(
-            self,
-            app_name: str,
-            *,
-            access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
+        self,
+        app_name: str,
+        *,
+        access_control_list: Optional[List[AppAccessControlRequest]] = None,
+    ) -> AppPermissions:
         """Update app permissions.
-        
+
         Updates the permissions on an app. Apps can inherit permissions from their root object.
-        
+
         :param app_name: str
           The app for which to get or manage permissions.
         :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-        
+
         :returns: :class:`AppPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/apps/{app_name}",
+            body=body,
+            headers=headers,
+        )
         return AppPermissions.from_dict(res)
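
Taken together, the reformatted AppsAPI covers the full create, deploy, and stop lifecycle. A minimal end-to-end sketch using the *_and_wait helpers shown above, assuming credentials come from the environment; the app name and source code path are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App, AppDeployment

w = WorkspaceClient()

# Blocks until compute_status.state reaches ComputeState.ACTIVE (default 20-minute timeout).
app = w.apps.create_and_wait(app=App(name="my-demo-app"))

# Blocks until status.state reaches AppDeploymentState.SUCCEEDED.
deployment = w.apps.deploy_and_wait(
    app_name=app.name,
    app_deployment=AppDeployment(source_code_path="/Workspace/Users/someone@example.com/my-demo-app"),
)
print(deployment.status.state)

# Blocks until compute_status.state reaches ComputeState.STOPPED.
w.apps.stop_and_wait(name=app.name)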
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index dd2579921..ed1c1c48c 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import compute
 
@@ -31,31 +31,37 @@ def as_dict(self) -> dict:
         """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.action_configuration_id is not None:
-            body['action_configuration_id'] = self.action_configuration_id
-        if self.action_type is not None: body['action_type'] = self.action_type.value
-        if self.target is not None: body['target'] = self.target
+            body["action_configuration_id"] = self.action_configuration_id
+        if self.action_type is not None:
+            body["action_type"] = self.action_type.value
+        if self.target is not None:
+            body["target"] = self.target
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.action_configuration_id is not None:
-            body['action_configuration_id'] = self.action_configuration_id
-        if self.action_type is not None: body['action_type'] = self.action_type
-        if self.target is not None: body['target'] = self.target
+            body["action_configuration_id"] = self.action_configuration_id
+        if self.action_type is not None:
+            body["action_type"] = self.action_type
+        if self.target is not None:
+            body["target"] = self.target
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration:
         """Deserializes the ActionConfiguration from a dictionary."""
-        return cls(action_configuration_id=d.get('action_configuration_id', None),
-                   action_type=_enum(d, 'action_type', ActionConfigurationType),
-                   target=d.get('target', None))
+        return cls(
+            action_configuration_id=d.get("action_configuration_id", None),
+            action_type=_enum(d, "action_type", ActionConfigurationType),
+            target=d.get("target", None),
+        )
 
 
 class ActionConfigurationType(Enum):
 
-    EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION'
+    EMAIL_NOTIFICATION = "EMAIL_NOTIFICATION"
 
 
 @dataclass
@@ -85,51 +91,62 @@ def as_dict(self) -> dict:
         """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.action_configurations:
-            body['action_configurations'] = [v.as_dict() for v in self.action_configurations]
+            body["action_configurations"] = [v.as_dict() for v in self.action_configurations]
         if self.alert_configuration_id is not None:
-            body['alert_configuration_id'] = self.alert_configuration_id
-        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
-        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value
-        if self.time_period is not None: body['time_period'] = self.time_period.value
-        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
+            body["alert_configuration_id"] = self.alert_configuration_id
+        if self.quantity_threshold is not None:
+            body["quantity_threshold"] = self.quantity_threshold
+        if self.quantity_type is not None:
+            body["quantity_type"] = self.quantity_type.value
+        if self.time_period is not None:
+            body["time_period"] = self.time_period.value
+        if self.trigger_type is not None:
+            body["trigger_type"] = self.trigger_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.action_configurations: body['action_configurations'] = self.action_configurations
+        if self.action_configurations:
+            body["action_configurations"] = self.action_configurations
         if self.alert_configuration_id is not None:
-            body['alert_configuration_id'] = self.alert_configuration_id
-        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
-        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
-        if self.time_period is not None: body['time_period'] = self.time_period
-        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+            body["alert_configuration_id"] = self.alert_configuration_id
+        if self.quantity_threshold is not None:
+            body["quantity_threshold"] = self.quantity_threshold
+        if self.quantity_type is not None:
+            body["quantity_type"] = self.quantity_type
+        if self.time_period is not None:
+            body["time_period"] = self.time_period
+        if self.trigger_type is not None:
+            body["trigger_type"] = self.trigger_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration:
         """Deserializes the AlertConfiguration from a dictionary."""
-        return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration),
-                   alert_configuration_id=d.get('alert_configuration_id', None),
-                   quantity_threshold=d.get('quantity_threshold', None),
-                   quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType),
-                   time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod),
-                   trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType))
+        return cls(
+            action_configurations=_repeated_dict(d, "action_configurations", ActionConfiguration),
+            alert_configuration_id=d.get("alert_configuration_id", None),
+            quantity_threshold=d.get("quantity_threshold", None),
+            quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType),
+            time_period=_enum(d, "time_period", AlertConfigurationTimePeriod),
+            trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType),
+        )
 
 
 class AlertConfigurationQuantityType(Enum):
 
-    LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD'
+    LIST_PRICE_DOLLARS_USD = "LIST_PRICE_DOLLARS_USD"
 
 
 class AlertConfigurationTimePeriod(Enum):
 
-    MONTH = 'MONTH'
+    MONTH = "MONTH"
 
 
 class AlertConfigurationTriggerType(Enum):
 
-    CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED'
+    CUMULATIVE_SPENDING_EXCEEDED = "CUMULATIVE_SPENDING_EXCEEDED"
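
Every message in this file follows the same serialization triad that black is reformatting here: as_dict() emits a JSON-ready dict (nested messages via .as_dict(), enums via .value), as_shallow_dict() keeps nested objects and enum members as-is, and from_dict() reverses the mapping. A minimal round-trip sketch for AlertConfiguration; the payload values below are illustrative, not taken from the SDK.

    from databricks.sdk.service.billing import AlertConfiguration, AlertConfigurationTimePeriod

    # Illustrative payload; keys mirror the AlertConfiguration fields shown above.
    raw = {
        "alert_configuration_id": "abc-123",
        "quantity_threshold": "200",
        "quantity_type": "LIST_PRICE_DOLLARS_USD",
        "time_period": "MONTH",
        "trigger_type": "CUMULATIVE_SPENDING_EXCEEDED",
    }
    cfg = AlertConfiguration.from_dict(raw)
    assert cfg.time_period is AlertConfigurationTimePeriod.MONTH
    assert cfg.as_dict()["time_period"] == "MONTH"                  # enum rendered via .value
    assert cfg.as_shallow_dict()["time_period"] is cfg.time_period  # enum member kept as-is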
 
 
 @dataclass
@@ -161,40 +178,53 @@ class BudgetConfiguration:
     def as_dict(self) -> dict:
         """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
         if self.alert_configurations:
-            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
+            body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations]
         if self.budget_configuration_id is not None:
-            body['budget_configuration_id'] = self.budget_configuration_id
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter.as_dict()
-        if self.update_time is not None: body['update_time'] = self.update_time
+            body["budget_configuration_id"] = self.budget_configuration_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.alert_configurations:
+            body["alert_configurations"] = self.alert_configurations
         if self.budget_configuration_id is not None:
-            body['budget_configuration_id'] = self.budget_configuration_id
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter
-        if self.update_time is not None: body['update_time'] = self.update_time
+            body["budget_configuration_id"] = self.budget_configuration_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration:
         """Deserializes the BudgetConfiguration from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration),
-                   budget_configuration_id=d.get('budget_configuration_id', None),
-                   create_time=d.get('create_time', None),
-                   display_name=d.get('display_name', None),
-                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter),
-                   update_time=d.get('update_time', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration),
+            budget_configuration_id=d.get("budget_configuration_id", None),
+            create_time=d.get("create_time", None),
+            display_name=d.get("display_name", None),
+            filter=_from_dict(d, "filter", BudgetConfigurationFilter),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -210,22 +240,28 @@ class BudgetConfigurationFilter:
     def as_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict()
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.workspace_id:
+            body["workspace_id"] = self.workspace_id.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.tags: body['tags'] = self.tags
-        if self.workspace_id: body['workspace_id'] = self.workspace_id
+        if self.tags:
+            body["tags"] = self.tags
+        if self.workspace_id:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter:
         """Deserializes the BudgetConfigurationFilter from a dictionary."""
-        return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause),
-                   workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause))
+        return cls(
+            tags=_repeated_dict(d, "tags", BudgetConfigurationFilterTagClause),
+            workspace_id=_from_dict(d, "workspace_id", BudgetConfigurationFilterWorkspaceIdClause),
+        )
 
 
 @dataclass
@@ -237,27 +273,33 @@ class BudgetConfigurationFilterClause:
     def as_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.operator is not None: body['operator'] = self.operator.value
-        if self.values: body['values'] = [v for v in self.values]
+        if self.operator is not None:
+            body["operator"] = self.operator.value
+        if self.values:
+            body["values"] = [v for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.operator is not None: body['operator'] = self.operator
-        if self.values: body['values'] = self.values
+        if self.operator is not None:
+            body["operator"] = self.operator
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause:
         """Deserializes the BudgetConfigurationFilterClause from a dictionary."""
-        return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator),
-                   values=d.get('values', None))
+        return cls(
+            operator=_enum(d, "operator", BudgetConfigurationFilterOperator),
+            values=d.get("values", None),
+        )
 
 
 class BudgetConfigurationFilterOperator(Enum):
 
-    IN = 'IN'
+    IN = "IN"
 
 
 @dataclass
@@ -269,21 +311,28 @@ class BudgetConfigurationFilterTagClause:
     def as_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value: body['value'] = self.value.as_dict()
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value:
+            body["value"] = self.value.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause:
         """Deserializes the BudgetConfigurationFilterTagClause from a dictionary."""
-        return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause))
+        return cls(
+            key=d.get("key", None),
+            value=_from_dict(d, "value", BudgetConfigurationFilterClause),
+        )
 
 
 @dataclass
@@ -295,22 +344,28 @@ class BudgetConfigurationFilterWorkspaceIdClause:
     def as_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.operator is not None: body['operator'] = self.operator.value
-        if self.values: body['values'] = [v for v in self.values]
+        if self.operator is not None:
+            body["operator"] = self.operator.value
+        if self.values:
+            body["values"] = [v for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.operator is not None: body['operator'] = self.operator
-        if self.values: body['values'] = self.values
+        if self.operator is not None:
+            body["operator"] = self.operator
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause:
         """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary."""
-        return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator),
-                   values=d.get('values', None))
+        return cls(
+            operator=_enum(d, "operator", BudgetConfigurationFilterOperator),
+            values=d.get("values", None),
+        )
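
The clause types above compose into a BudgetConfigurationFilter, and BudgetConfigurationFilterOperator currently defines only IN. A hedged construction sketch; the tag key, tag value, and workspace id are placeholders.

    from databricks.sdk.service.billing import (
        BudgetConfigurationFilter,
        BudgetConfigurationFilterClause,
        BudgetConfigurationFilterOperator,
        BudgetConfigurationFilterTagClause,
        BudgetConfigurationFilterWorkspaceIdClause,
    )

    # Scope a budget to one tag value and a set of workspace ids (placeholder values).
    flt = BudgetConfigurationFilter(
        tags=[
            BudgetConfigurationFilterTagClause(
                key="team",
                value=BudgetConfigurationFilterClause(
                    operator=BudgetConfigurationFilterOperator.IN,
                    values=["data-platform"],
                ),
            )
        ],
        workspace_id=BudgetConfigurationFilterWorkspaceIdClause(
            operator=BudgetConfigurationFilterOperator.IN,
            values=[1234567890],
        ),
    )
    flt.as_dict()  # nested clauses serialized via .as_dict(), operators via .value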
 
 
 @dataclass
@@ -330,25 +385,33 @@ class BudgetPolicy:
     def as_dict(self) -> dict:
         """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetPolicy:
         """Deserializes the BudgetPolicy from a dictionary."""
-        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
-                   policy_id=d.get('policy_id', None),
-                   policy_name=d.get('policy_name', None))
+        return cls(
+            custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
+            policy_id=d.get("policy_id", None),
+            policy_name=d.get("policy_name", None),
+        )
 
 
 @dataclass
@@ -363,22 +426,28 @@ class CreateBillingUsageDashboardRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.dashboard_type is not None:
+            body["dashboard_type"] = self.dashboard_type.value
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.dashboard_type is not None:
+            body["dashboard_type"] = self.dashboard_type
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest:
         """Deserializes the CreateBillingUsageDashboardRequest from a dictionary."""
-        return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            dashboard_type=_enum(d, "dashboard_type", UsageDashboardType),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -389,19 +458,21 @@ class CreateBillingUsageDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse:
         """Deserializes the CreateBillingUsageDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None))
+        return cls(dashboard_id=d.get("dashboard_id", None))
 
 
 @dataclass
@@ -424,30 +495,42 @@ class CreateBudgetConfigurationBudget:
     def as_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
         if self.alert_configurations:
-            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter.as_dict()
+            body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.alert_configurations:
+            body["alert_configurations"] = self.alert_configurations
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget:
         """Deserializes the CreateBudgetConfigurationBudget from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   alert_configurations=_repeated_dict(d, 'alert_configurations',
-                                                       CreateBudgetConfigurationBudgetAlertConfigurations),
-                   display_name=d.get('display_name', None),
-                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter))
+        return cls(
+            account_id=d.get("account_id", None),
+            alert_configurations=_repeated_dict(
+                d,
+                "alert_configurations",
+                CreateBudgetConfigurationBudgetAlertConfigurations,
+            ),
+            display_name=d.get("display_name", None),
+            filter=_from_dict(d, "filter", BudgetConfigurationFilter),
+        )
 
 
 @dataclass
@@ -461,21 +544,28 @@ class CreateBudgetConfigurationBudgetActionConfigurations:
     def as_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.action_type is not None: body['action_type'] = self.action_type.value
-        if self.target is not None: body['target'] = self.target
+        if self.action_type is not None:
+            body["action_type"] = self.action_type.value
+        if self.target is not None:
+            body["target"] = self.target
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.action_type is not None: body['action_type'] = self.action_type
-        if self.target is not None: body['target'] = self.target
+        if self.action_type is not None:
+            body["action_type"] = self.action_type
+        if self.target is not None:
+            body["target"] = self.target
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary."""
-        return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None))
+        return cls(
+            action_type=_enum(d, "action_type", ActionConfigurationType),
+            target=d.get("target", None),
+        )
 
 
 @dataclass
@@ -502,32 +592,46 @@ def as_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.action_configurations:
-            body['action_configurations'] = [v.as_dict() for v in self.action_configurations]
-        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
-        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value
-        if self.time_period is not None: body['time_period'] = self.time_period.value
-        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
+            body["action_configurations"] = [v.as_dict() for v in self.action_configurations]
+        if self.quantity_threshold is not None:
+            body["quantity_threshold"] = self.quantity_threshold
+        if self.quantity_type is not None:
+            body["quantity_type"] = self.quantity_type.value
+        if self.time_period is not None:
+            body["time_period"] = self.time_period.value
+        if self.trigger_type is not None:
+            body["trigger_type"] = self.trigger_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.action_configurations: body['action_configurations'] = self.action_configurations
-        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
-        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
-        if self.time_period is not None: body['time_period'] = self.time_period
-        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+        if self.action_configurations:
+            body["action_configurations"] = self.action_configurations
+        if self.quantity_threshold is not None:
+            body["quantity_threshold"] = self.quantity_threshold
+        if self.quantity_type is not None:
+            body["quantity_type"] = self.quantity_type
+        if self.time_period is not None:
+            body["time_period"] = self.time_period
+        if self.trigger_type is not None:
+            body["trigger_type"] = self.trigger_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary."""
-        return cls(action_configurations=_repeated_dict(d, 'action_configurations',
-                                                        CreateBudgetConfigurationBudgetActionConfigurations),
-                   quantity_threshold=d.get('quantity_threshold', None),
-                   quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType),
-                   time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod),
-                   trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType))
+        return cls(
+            action_configurations=_repeated_dict(
+                d,
+                "action_configurations",
+                CreateBudgetConfigurationBudgetActionConfigurations,
+            ),
+            quantity_threshold=d.get("quantity_threshold", None),
+            quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType),
+            time_period=_enum(d, "time_period", AlertConfigurationTimePeriod),
+            trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType),
+        )
 
 
 @dataclass
@@ -538,19 +642,21 @@ class CreateBudgetConfigurationRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.budget:
+            body["budget"] = self.budget.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget: body['budget'] = self.budget
+        if self.budget:
+            body["budget"] = self.budget
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest:
         """Deserializes the CreateBudgetConfigurationRequest from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget))
+        return cls(budget=_from_dict(d, "budget", CreateBudgetConfigurationBudget))
 
 
 @dataclass
@@ -561,19 +667,21 @@ class CreateBudgetConfigurationResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.budget:
+            body["budget"] = self.budget.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget: body['budget'] = self.budget
+        if self.budget:
+            body["budget"] = self.budget
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse:
         """Deserializes the CreateBudgetConfigurationResponse from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
+        return cls(budget=_from_dict(d, "budget", BudgetConfiguration))
 
 
 @dataclass
@@ -594,25 +702,33 @@ class CreateBudgetPolicyRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
-        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
-        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetPolicyRequest:
         """Deserializes the CreateBudgetPolicyRequest from a dictionary."""
-        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
-                   policy_name=d.get('policy_name', None),
-                   request_id=d.get('request_id', None))
+        return cls(
+            custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag),
+            policy_name=d.get("policy_name", None),
+            request_id=d.get("request_id", None),
+        )
 
 
 @dataclass
@@ -689,45 +805,63 @@ class CreateLogDeliveryConfigurationParams:
     def as_dict(self) -> dict:
         """Serializes the CreateLogDeliveryConfigurationParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config_name is not None: body['config_name'] = self.config_name
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
-        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
-        if self.log_type is not None: body['log_type'] = self.log_type.value
-        if self.output_format is not None: body['output_format'] = self.output_format.value
-        if self.status is not None: body['status'] = self.status.value
+        if self.config_name is not None:
+            body["config_name"] = self.config_name
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.delivery_path_prefix is not None:
+            body["delivery_path_prefix"] = self.delivery_path_prefix
+        if self.delivery_start_time is not None:
+            body["delivery_start_time"] = self.delivery_start_time
+        if self.log_type is not None:
+            body["log_type"] = self.log_type.value
+        if self.output_format is not None:
+            body["output_format"] = self.output_format.value
+        if self.status is not None:
+            body["status"] = self.status.value
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
-        if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.workspace_ids_filter:
+            body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config_name is not None: body['config_name'] = self.config_name
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
-        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
-        if self.log_type is not None: body['log_type'] = self.log_type
-        if self.output_format is not None: body['output_format'] = self.output_format
-        if self.status is not None: body['status'] = self.status
+        if self.config_name is not None:
+            body["config_name"] = self.config_name
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.delivery_path_prefix is not None:
+            body["delivery_path_prefix"] = self.delivery_path_prefix
+        if self.delivery_start_time is not None:
+            body["delivery_start_time"] = self.delivery_start_time
+        if self.log_type is not None:
+            body["log_type"] = self.log_type
+        if self.output_format is not None:
+            body["output_format"] = self.output_format
+        if self.status is not None:
+            body["status"] = self.status
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
-        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.workspace_ids_filter:
+            body["workspace_ids_filter"] = self.workspace_ids_filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams:
         """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary."""
-        return cls(config_name=d.get('config_name', None),
-                   credentials_id=d.get('credentials_id', None),
-                   delivery_path_prefix=d.get('delivery_path_prefix', None),
-                   delivery_start_time=d.get('delivery_start_time', None),
-                   log_type=_enum(d, 'log_type', LogType),
-                   output_format=_enum(d, 'output_format', OutputFormat),
-                   status=_enum(d, 'status', LogDeliveryConfigStatus),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   workspace_ids_filter=d.get('workspace_ids_filter', None))
+        return cls(
+            config_name=d.get("config_name", None),
+            credentials_id=d.get("credentials_id", None),
+            delivery_path_prefix=d.get("delivery_path_prefix", None),
+            delivery_start_time=d.get("delivery_start_time", None),
+            log_type=_enum(d, "log_type", LogType),
+            output_format=_enum(d, "output_format", OutputFormat),
+            status=_enum(d, "status", LogDeliveryConfigStatus),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            workspace_ids_filter=d.get("workspace_ids_filter", None),
+        )
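
Per the LogType and OutputFormat docstrings later in this file, billable-usage delivery must use CSV and audit-log delivery must use JSON. A hedged sketch of a billable-usage delivery request body; the ids, names, and path prefix are placeholders.

    from databricks.sdk.service.billing import (
        CreateLogDeliveryConfigurationParams,
        LogDeliveryConfigStatus,
        LogType,
        OutputFormat,
    )

    params = CreateLogDeliveryConfigurationParams(
        config_name="usage-to-s3",                       # placeholder
        credentials_id="<credentials-id>",               # placeholder
        storage_configuration_id="<storage-config-id>",  # placeholder
        delivery_path_prefix="billable-usage",           # placeholder
        log_type=LogType.BILLABLE_USAGE,
        output_format=OutputFormat.CSV,                  # CSV is required for BILLABLE_USAGE
        status=LogDeliveryConfigStatus.ENABLED,
    )
    params.as_dict()  # enums serialized via .value, ready to send as the JSON body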
 
 
 @dataclass
@@ -775,13 +909,14 @@ class DeliveryStatus(Enum):
     misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The
     latest attempt of log delivery failed because of a Databricks internal error. Contact support
     if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been
-    disabled since the release of this feature or there are no workspaces in the account."""
+    disabled since the release of this feature or there are no workspaces in the account.
+    """
 
-    CREATED = 'CREATED'
-    NOT_FOUND = 'NOT_FOUND'
-    SUCCEEDED = 'SUCCEEDED'
-    SYSTEM_FAILURE = 'SYSTEM_FAILURE'
-    USER_FAILURE = 'USER_FAILURE'
+    CREATED = "CREATED"
+    NOT_FOUND = "NOT_FOUND"
+    SUCCEEDED = "SUCCEEDED"
+    SYSTEM_FAILURE = "SYSTEM_FAILURE"
+    USER_FAILURE = "USER_FAILURE"
 
 
 @dataclass
@@ -791,19 +926,21 @@ class DownloadResponse:
     def as_dict(self) -> dict:
         """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
-        return cls(contents=d.get('contents', None))
+        return cls(contents=d.get("contents", None))
 
 
 @dataclass
@@ -823,25 +960,33 @@ class Filter:
     def as_dict(self) -> dict:
         """Serializes the Filter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.creator_user_id is not None:
+            body["creator_user_id"] = self.creator_user_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Filter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.creator_user_id is not None:
+            body["creator_user_id"] = self.creator_user_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.policy_name is not None:
+            body["policy_name"] = self.policy_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Filter:
         """Deserializes the Filter from a dictionary."""
-        return cls(creator_user_id=d.get('creator_user_id', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   policy_name=d.get('policy_name', None))
+        return cls(
+            creator_user_id=d.get("creator_user_id", None),
+            creator_user_name=d.get("creator_user_name", None),
+            policy_name=d.get("policy_name", None),
+        )
 
 
 @dataclass
@@ -855,21 +1000,28 @@ class GetBillingUsageDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the GetBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.dashboard_url is not None:
+            body["dashboard_url"] = self.dashboard_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.dashboard_url is not None:
+            body["dashboard_url"] = self.dashboard_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse:
         """Deserializes the GetBillingUsageDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            dashboard_url=d.get("dashboard_url", None),
+        )
 
 
 @dataclass
@@ -879,19 +1031,21 @@ class GetBudgetConfigurationResponse:
     def as_dict(self) -> dict:
         """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.budget:
+            body["budget"] = self.budget.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget: body['budget'] = self.budget
+        if self.budget:
+            body["budget"] = self.budget
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse:
         """Deserializes the GetBudgetConfigurationResponse from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
+        return cls(budget=_from_dict(d, "budget", BudgetConfiguration))
 
 
 @dataclass
@@ -926,22 +1080,28 @@ class ListBudgetConfigurationsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.budgets:
+            body["budgets"] = [v.as_dict() for v in self.budgets]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budgets: body['budgets'] = self.budgets
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.budgets:
+            body["budgets"] = self.budgets
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
         """Deserializes the ListBudgetConfigurationsResponse from a dictionary."""
-        return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            budgets=_repeated_dict(d, "budgets", BudgetConfiguration),
+            next_page_token=d.get("next_page_token", None),
+        )
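
ListBudgetConfigurationsResponse carries a next_page_token, so callers page until the token is absent. A minimal paging sketch assuming a hypothetical fetch_page callable that returns the raw JSON dict for one page; it is not part of the SDK.

    from typing import Callable, Iterator, Optional

    from databricks.sdk.service.billing import BudgetConfiguration, ListBudgetConfigurationsResponse

    def iter_budgets(fetch_page: Callable[..., dict]) -> Iterator[BudgetConfiguration]:
        """Follow next_page_token until exhausted; fetch_page is a hypothetical callable."""
        token: Optional[str] = None
        while True:
            resp = ListBudgetConfigurationsResponse.from_dict(fetch_page(page_token=token))
            for budget in resp.budgets or []:
                yield budget
            token = resp.next_page_token
            if not token:
                break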
 
 
 @dataclass
@@ -961,35 +1121,44 @@ class ListBudgetPoliciesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
-        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policies:
+            body["policies"] = [v.as_dict() for v in self.policies]
+        if self.previous_page_token is not None:
+            body["previous_page_token"] = self.previous_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policies: body['policies'] = self.policies
-        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policies:
+            body["policies"] = self.policies
+        if self.previous_page_token is not None:
+            body["previous_page_token"] = self.previous_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListBudgetPoliciesResponse:
         """Deserializes the ListBudgetPoliciesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   policies=_repeated_dict(d, 'policies', BudgetPolicy),
-                   previous_page_token=d.get('previous_page_token', None))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            policies=_repeated_dict(d, "policies", BudgetPolicy),
+            previous_page_token=d.get("previous_page_token", None),
+        )
 
 
 class LogDeliveryConfigStatus(Enum):
     """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
     Defaults to `ENABLED`. You can [enable or disable the
     configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration
-    is not supported, so disable a log delivery configuration that is no longer needed."""
+    is not supported, so disable a log delivery configuration that is no longer needed.
+    """
 
-    DISABLED = 'DISABLED'
-    ENABLED = 'ENABLED'
+    DISABLED = "DISABLED"
+    ENABLED = "ENABLED"
 
 
 @dataclass
@@ -1081,60 +1250,88 @@ class LogDeliveryConfiguration:
     def as_dict(self) -> dict:
         """Serializes the LogDeliveryConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.config_id is not None: body['config_id'] = self.config_id
-        if self.config_name is not None: body['config_name'] = self.config_name
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
-        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
-        if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status.as_dict()
-        if self.log_type is not None: body['log_type'] = self.log_type.value
-        if self.output_format is not None: body['output_format'] = self.output_format.value
-        if self.status is not None: body['status'] = self.status.value
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.config_id is not None:
+            body["config_id"] = self.config_id
+        if self.config_name is not None:
+            body["config_name"] = self.config_name
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.delivery_path_prefix is not None:
+            body["delivery_path_prefix"] = self.delivery_path_prefix
+        if self.delivery_start_time is not None:
+            body["delivery_start_time"] = self.delivery_start_time
+        if self.log_delivery_status:
+            body["log_delivery_status"] = self.log_delivery_status.as_dict()
+        if self.log_type is not None:
+            body["log_type"] = self.log_type.value
+        if self.output_format is not None:
+            body["output_format"] = self.output_format.value
+        if self.status is not None:
+            body["status"] = self.status.value
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.workspace_ids_filter:
+            body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.config_id is not None: body['config_id'] = self.config_id
-        if self.config_name is not None: body['config_name'] = self.config_name
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
-        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
-        if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status
-        if self.log_type is not None: body['log_type'] = self.log_type
-        if self.output_format is not None: body['output_format'] = self.output_format
-        if self.status is not None: body['status'] = self.status
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.config_id is not None:
+            body["config_id"] = self.config_id
+        if self.config_name is not None:
+            body["config_name"] = self.config_name
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.delivery_path_prefix is not None:
+            body["delivery_path_prefix"] = self.delivery_path_prefix
+        if self.delivery_start_time is not None:
+            body["delivery_start_time"] = self.delivery_start_time
+        if self.log_delivery_status:
+            body["log_delivery_status"] = self.log_delivery_status
+        if self.log_type is not None:
+            body["log_type"] = self.log_type
+        if self.output_format is not None:
+            body["output_format"] = self.output_format
+        if self.status is not None:
+            body["status"] = self.status
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.workspace_ids_filter:
+            body["workspace_ids_filter"] = self.workspace_ids_filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryConfiguration:
         """Deserializes the LogDeliveryConfiguration from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   config_id=d.get('config_id', None),
-                   config_name=d.get('config_name', None),
-                   creation_time=d.get('creation_time', None),
-                   credentials_id=d.get('credentials_id', None),
-                   delivery_path_prefix=d.get('delivery_path_prefix', None),
-                   delivery_start_time=d.get('delivery_start_time', None),
-                   log_delivery_status=_from_dict(d, 'log_delivery_status', LogDeliveryStatus),
-                   log_type=_enum(d, 'log_type', LogType),
-                   output_format=_enum(d, 'output_format', OutputFormat),
-                   status=_enum(d, 'status', LogDeliveryConfigStatus),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   update_time=d.get('update_time', None),
-                   workspace_ids_filter=d.get('workspace_ids_filter', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            config_id=d.get("config_id", None),
+            config_name=d.get("config_name", None),
+            creation_time=d.get("creation_time", None),
+            credentials_id=d.get("credentials_id", None),
+            delivery_path_prefix=d.get("delivery_path_prefix", None),
+            delivery_start_time=d.get("delivery_start_time", None),
+            log_delivery_status=_from_dict(d, "log_delivery_status", LogDeliveryStatus),
+            log_type=_enum(d, "log_type", LogType),
+            output_format=_enum(d, "output_format", OutputFormat),
+            status=_enum(d, "status", LogDeliveryConfigStatus),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            update_time=d.get("update_time", None),
+            workspace_ids_filter=d.get("workspace_ids_filter", None),
+        )
 
 
 @dataclass
@@ -1163,63 +1360,73 @@ class LogDeliveryStatus:
     def as_dict(self) -> dict:
         """Serializes the LogDeliveryStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time
+        if self.last_attempt_time is not None:
+            body["last_attempt_time"] = self.last_attempt_time
         if self.last_successful_attempt_time is not None:
-            body['last_successful_attempt_time'] = self.last_successful_attempt_time
-        if self.message is not None: body['message'] = self.message
-        if self.status is not None: body['status'] = self.status.value
+            body["last_successful_attempt_time"] = self.last_successful_attempt_time
+        if self.message is not None:
+            body["message"] = self.message
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time
+        if self.last_attempt_time is not None:
+            body["last_attempt_time"] = self.last_attempt_time
         if self.last_successful_attempt_time is not None:
-            body['last_successful_attempt_time'] = self.last_successful_attempt_time
-        if self.message is not None: body['message'] = self.message
-        if self.status is not None: body['status'] = self.status
+            body["last_successful_attempt_time"] = self.last_successful_attempt_time
+        if self.message is not None:
+            body["message"] = self.message
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryStatus:
         """Deserializes the LogDeliveryStatus from a dictionary."""
-        return cls(last_attempt_time=d.get('last_attempt_time', None),
-                   last_successful_attempt_time=d.get('last_successful_attempt_time', None),
-                   message=d.get('message', None),
-                   status=_enum(d, 'status', DeliveryStatus))
+        return cls(
+            last_attempt_time=d.get("last_attempt_time", None),
+            last_successful_attempt_time=d.get("last_successful_attempt_time", None),
+            message=d.get("message", None),
+            status=_enum(d, "status", DeliveryStatus),
+        )
 
 
 class LogType(Enum):
     """Log delivery type. Supported values are:
-    
+
     * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the
     [View billable usage].
-    
+
     * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit
     logging]
-    
+
     [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
     [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
     [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html"""
+    [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+    """
 
-    AUDIT_LOGS = 'AUDIT_LOGS'
-    BILLABLE_USAGE = 'BILLABLE_USAGE'
+    AUDIT_LOGS = "AUDIT_LOGS"
+    BILLABLE_USAGE = "BILLABLE_USAGE"
 
 
 class OutputFormat(Enum):
     """The file type of log delivery.
-    
+
     * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated
     values) format is supported. For the schema, see the [View billable usage] * If `log_type` is
     `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is
     supported. For the schema, see the [Configuring audit logs].
-    
+
     [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html"""
+    [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+    """
 
-    CSV = 'CSV'
-    JSON = 'JSON'
+    CSV = "CSV"
+    JSON = "JSON"
 
 
 @dataclass
@@ -1252,26 +1459,33 @@ class SortSpec:
     def as_dict(self) -> dict:
         """Serializes the SortSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.descending is not None: body['descending'] = self.descending
-        if self.field is not None: body['field'] = self.field.value
+        if self.descending is not None:
+            body["descending"] = self.descending
+        if self.field is not None:
+            body["field"] = self.field.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SortSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.descending is not None: body['descending'] = self.descending
-        if self.field is not None: body['field'] = self.field
+        if self.descending is not None:
+            body["descending"] = self.descending
+        if self.field is not None:
+            body["field"] = self.field
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SortSpec:
         """Deserializes the SortSpec from a dictionary."""
-        return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField))
+        return cls(
+            descending=d.get("descending", None),
+            field=_enum(d, "field", SortSpecField),
+        )
 
 
 class SortSpecField(Enum):
 
-    POLICY_NAME = 'POLICY_NAME'
+    POLICY_NAME = "POLICY_NAME"
 
 
 @dataclass
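
The as_dict / as_shallow_dict / from_dict trio reformatted throughout these hunks follows one pattern: as_dict serializes enum members by value, while from_dict restores them via the _enum helper. A small round-trip sketch using SortSpec, assuming the import path databricks.sdk.service.billing:

from databricks.sdk.service.billing import SortSpec, SortSpecField

spec = SortSpec(descending=True, field=SortSpecField.POLICY_NAME)
payload = spec.as_dict()            # {"descending": True, "field": "POLICY_NAME"}
restored = SortSpec.from_dict(payload)

assert restored.descending is True
assert restored.field is SortSpecField.POLICY_NAME
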
@@ -1297,34 +1511,43 @@ class UpdateBudgetConfigurationBudget:
     def as_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
         if self.alert_configurations:
-            body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations]
+            body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations]
         if self.budget_configuration_id is not None:
-            body['budget_configuration_id'] = self.budget_configuration_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter.as_dict()
+            body["budget_configuration_id"] = self.budget_configuration_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.alert_configurations:
+            body["alert_configurations"] = self.alert_configurations
         if self.budget_configuration_id is not None:
-            body['budget_configuration_id'] = self.budget_configuration_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.filter: body['filter'] = self.filter
+            body["budget_configuration_id"] = self.budget_configuration_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.filter:
+            body["filter"] = self.filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget:
         """Deserializes the UpdateBudgetConfigurationBudget from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration),
-                   budget_configuration_id=d.get('budget_configuration_id', None),
-                   display_name=d.get('display_name', None),
-                   filter=_from_dict(d, 'filter', BudgetConfigurationFilter))
+        return cls(
+            account_id=d.get("account_id", None),
+            alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration),
+            budget_configuration_id=d.get("budget_configuration_id", None),
+            display_name=d.get("display_name", None),
+            filter=_from_dict(d, "filter", BudgetConfigurationFilter),
+        )
 
 
 @dataclass
@@ -1338,22 +1561,28 @@ class UpdateBudgetConfigurationRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
-        if self.budget_id is not None: body['budget_id'] = self.budget_id
+        if self.budget:
+            body["budget"] = self.budget.as_dict()
+        if self.budget_id is not None:
+            body["budget_id"] = self.budget_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget: body['budget'] = self.budget
-        if self.budget_id is not None: body['budget_id'] = self.budget_id
+        if self.budget:
+            body["budget"] = self.budget
+        if self.budget_id is not None:
+            body["budget_id"] = self.budget_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest:
         """Deserializes the UpdateBudgetConfigurationRequest from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget),
-                   budget_id=d.get('budget_id', None))
+        return cls(
+            budget=_from_dict(d, "budget", UpdateBudgetConfigurationBudget),
+            budget_id=d.get("budget_id", None),
+        )
 
 
 @dataclass
@@ -1364,19 +1593,21 @@ class UpdateBudgetConfigurationResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget: body['budget'] = self.budget.as_dict()
+        if self.budget:
+            body["budget"] = self.budget.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget: body['budget'] = self.budget
+        if self.budget:
+            body["budget"] = self.budget
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse:
         """Deserializes the UpdateBudgetConfigurationResponse from a dictionary."""
-        return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
+        return cls(budget=_from_dict(d, "budget", BudgetConfiguration))
 
 
 @dataclass
@@ -1394,29 +1625,33 @@ def as_dict(self) -> dict:
         """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.log_delivery_configuration_id is not None:
-            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
-        if self.status is not None: body['status'] = self.status.value
+            body["log_delivery_configuration_id"] = self.log_delivery_configuration_id
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.log_delivery_configuration_id is not None:
-            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
-        if self.status is not None: body['status'] = self.status
+            body["log_delivery_configuration_id"] = self.log_delivery_configuration_id
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest:
         """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
-        return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None),
-                   status=_enum(d, 'status', LogDeliveryConfigStatus))
+        return cls(
+            log_delivery_configuration_id=d.get("log_delivery_configuration_id", None),
+            status=_enum(d, "status", LogDeliveryConfigStatus),
+        )
 
 
 class UsageDashboardType(Enum):
 
-    USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL'
-    USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE'
+    USAGE_DASHBOARD_TYPE_GLOBAL = "USAGE_DASHBOARD_TYPE_GLOBAL"
+    USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE"
 
 
 @dataclass
@@ -1427,21 +1662,26 @@ def as_dict(self) -> dict:
         """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.log_delivery_configuration:
-            body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
+            body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.log_delivery_configuration:
-            body['log_delivery_configuration'] = self.log_delivery_configuration
+            body["log_delivery_configuration"] = self.log_delivery_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedCreateLogDeliveryConfiguration:
         """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary."""
-        return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration',
-                                                         CreateLogDeliveryConfigurationParams))
+        return cls(
+            log_delivery_configuration=_from_dict(
+                d,
+                "log_delivery_configuration",
+                CreateLogDeliveryConfigurationParams,
+            )
+        )
 
 
 @dataclass
@@ -1452,21 +1692,20 @@ def as_dict(self) -> dict:
         """Serializes the WrappedLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.log_delivery_configuration:
-            body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
+            body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.log_delivery_configuration:
-            body['log_delivery_configuration'] = self.log_delivery_configuration
+            body["log_delivery_configuration"] = self.log_delivery_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfiguration:
         """Deserializes the WrappedLogDeliveryConfiguration from a dictionary."""
-        return cls(
-            log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', LogDeliveryConfiguration))
+        return cls(log_delivery_configuration=_from_dict(d, "log_delivery_configuration", LogDeliveryConfiguration))
 
 
 @dataclass
@@ -1477,21 +1716,22 @@ def as_dict(self) -> dict:
         """Serializes the WrappedLogDeliveryConfigurations into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.log_delivery_configurations:
-            body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations]
+            body["log_delivery_configurations"] = [v.as_dict() for v in self.log_delivery_configurations]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WrappedLogDeliveryConfigurations into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.log_delivery_configurations:
-            body['log_delivery_configurations'] = self.log_delivery_configurations
+            body["log_delivery_configurations"] = self.log_delivery_configurations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfigurations:
         """Deserializes the WrappedLogDeliveryConfigurations from a dictionary."""
-        return cls(log_delivery_configurations=_repeated_dict(d, 'log_delivery_configurations',
-                                                              LogDeliveryConfiguration))
+        return cls(
+            log_delivery_configurations=_repeated_dict(d, "log_delivery_configurations", LogDeliveryConfiguration)
+        )
 
 
 class BillableUsageAPI:
@@ -1501,22 +1741,24 @@ class BillableUsageAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def download(self,
-                 start_month: str,
-                 end_month: str,
-                 *,
-                 personal_data: Optional[bool] = None) -> DownloadResponse:
+    def download(
+        self,
+        start_month: str,
+        end_month: str,
+        *,
+        personal_data: Optional[bool] = None,
+    ) -> DownloadResponse:
         """Return billable usage logs.
-        
+
         Returns billable usage logs in CSV format for the specified account and date range. For the data
         schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
-        
+
         **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of
         the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If
         you experience this, try to mitigate by calling the API with narrower date ranges.
-        
+
         [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
-        
+
         :param start_month: str
           Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
         :param end_month: str
@@ -1525,21 +1767,28 @@ def download(self,
           Specify whether to include personally identifiable information in the billable usage logs, for
           example the email addresses of cluster creators. Handle this information with care. Defaults to
           false.
-        
+
         :returns: :class:`DownloadResponse`
         """
 
         query = {}
-        if end_month is not None: query['end_month'] = end_month
-        if personal_data is not None: query['personal_data'] = personal_data
-        if start_month is not None: query['start_month'] = start_month
-        headers = {'Accept': 'text/plain', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/usage/download',
-                           query=query,
-                           headers=headers,
-                           raw=True)
+        if end_month is not None:
+            query["end_month"] = end_month
+        if personal_data is not None:
+            query["personal_data"] = personal_data
+        if start_month is not None:
+            query["start_month"] = start_month
+        headers = {
+            "Accept": "text/plain",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/usage/download",
+            query=query,
+            headers=headers,
+            raw=True,
+        )
         return DownloadResponse.from_dict(res)
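
A hedged usage sketch for the download method above. It assumes an AccountClient configured with account-level credentials from the environment, and that DownloadResponse exposes the raw CSV stream through a contents attribute:

from databricks.sdk import AccountClient

a = AccountClient()  # account_id and credentials are assumed to come from the environment

# Keep the date range narrow; the docstring above warns the endpoint may time out on large ranges.
resp = a.billable_usage.download(start_month="2024-01", end_month="2024-02", personal_data=False)
csv_bytes = resp.contents.read()  # assumes DownloadResponse.contents is a binary stream
print(csv_bytes[:200])
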
 
 
@@ -1549,15 +1798,17 @@ class BudgetPolicyAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               custom_tags: Optional[List[compute.CustomPolicyTag]] = None,
-               policy_name: Optional[str] = None,
-               request_id: Optional[str] = None) -> BudgetPolicy:
+    def create(
+        self,
+        *,
+        custom_tags: Optional[List[compute.CustomPolicyTag]] = None,
+        policy_name: Optional[str] = None,
+        request_id: Optional[str] = None,
+    ) -> BudgetPolicy:
         """Create a budget policy.
-        
+
         Creates a new policy.
-        
+
         :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
           A list of tags defined by the customer. At most 40 entries are allowed per policy.
         :param policy_name: str (optional)
@@ -1566,66 +1817,84 @@ def create(self,
         :param request_id: str (optional)
           A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
           recommended. This request is only idempotent if a `request_id` is provided.
-        
+
         :returns: :class:`BudgetPolicy`
         """
         body = {}
-        if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags]
-        if policy_name is not None: body['policy_name'] = policy_name
-        if request_id is not None: body['request_id'] = request_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if custom_tags is not None:
+            body["custom_tags"] = [v.as_dict() for v in custom_tags]
+        if policy_name is not None:
+            body["policy_name"] = policy_name
+        if request_id is not None:
+            body["request_id"] = request_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/accounts/{self._api.account_id}/budget-policies",
+            body=body,
+            headers=headers,
+        )
         return BudgetPolicy.from_dict(res)
 
     def delete(self, policy_id: str):
         """Delete a budget policy.
-        
+
         Deletes a policy
-        
+
         :param policy_id: str
           The Id of the policy.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}",
+            headers=headers,
+        )
 
     def get(self, policy_id: str) -> BudgetPolicy:
         """Get a budget policy.
-        
+
         Retrieves a policy by its ID.
-        
+
         :param policy_id: str
           The Id of the policy.
-        
+
         :returns: :class:`BudgetPolicy`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}",
+            headers=headers,
+        )
         return BudgetPolicy.from_dict(res)
 
-    def list(self,
-             *,
-             filter_by: Optional[Filter] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None,
-             sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]:
+    def list(
+        self,
+        *,
+        filter_by: Optional[Filter] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        sort_spec: Optional[SortSpec] = None,
+    ) -> Iterator[BudgetPolicy]:
         """List policies.
-        
+
         Lists all policies. Policies are returned in the alphabetically ascending order of their names.
-        
+
         :param filter_by: :class:`Filter` (optional)
           A filter to apply to the list of policies.
         :param page_size: int (optional)
@@ -1634,62 +1903,78 @@ def list(self,
         :param page_token: str (optional)
           A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
           subsequent page. If unspecified, the first page will be returned.
-          
+
           When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
           call that provided the page token.
         :param sort_spec: :class:`SortSpec` (optional)
           The sort specification.
-        
+
         :returns: Iterator over :class:`BudgetPolicy`
         """
 
         query = {}
-        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict()
-        headers = {'Accept': 'application/json', }
+        if filter_by is not None:
+            query["filter_by"] = filter_by.as_dict()
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if sort_spec is not None:
+            query["sort_spec"] = sort_spec.as_dict()
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
-                                query=query,
-                                headers=headers)
-            if 'policies' in json:
-                for v in json['policies']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/accounts/{self._api.account_id}/budget-policies",
+                query=query,
+                headers=headers,
+            )
+            if "policies" in json:
+                for v in json["policies"]:
                     yield BudgetPolicy.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
-    def update(self,
-               policy_id: str,
-               *,
-               limit_config: Optional[LimitConfig] = None,
-               policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
+    def update(
+        self,
+        policy_id: str,
+        *,
+        limit_config: Optional[LimitConfig] = None,
+        policy: Optional[BudgetPolicy] = None,
+    ) -> BudgetPolicy:
         """Update a budget policy.
-        
+
         Updates a policy
-        
+
         :param policy_id: str
           The Id of the policy. This field is generated by Databricks and globally unique.
         :param limit_config: :class:`LimitConfig` (optional)
           DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy.
         :param policy: :class:`BudgetPolicy` (optional)
           Contains the BudgetPolicy details.
-        
+
         :returns: :class:`BudgetPolicy`
         """
         body = policy.as_dict()
         query = {}
-        if limit_config is not None: query['limit_config'] = limit_config.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
-                           query=query,
-                           body=body,
-                           headers=headers)
+        if limit_config is not None:
+            query["limit_config"] = limit_config.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}",
+            query=query,
+            body=body,
+            headers=headers,
+        )
         return BudgetPolicy.from_dict(res)
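
The budget-policy methods above compose into a simple lifecycle. A sketch under the assumption that AccountClient exposes this service as budget_policy; the policy name is hypothetical:

from databricks.sdk import AccountClient
from databricks.sdk.service.billing import SortSpec, SortSpecField

a = AccountClient()

# Create a policy, list policies sorted by name, then clean up.
policy = a.budget_policy.create(policy_name="example-policy")
for p in a.budget_policy.list(sort_spec=SortSpec(field=SortSpecField.POLICY_NAME)):
    print(p.policy_id, p.policy_name)
a.budget_policy.delete(policy_id=policy.policy_id)
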
 
 
@@ -1703,111 +1988,135 @@ def __init__(self, api_client):
 
     def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse:
         """Create new budget.
-        
+
         Create a new budget configuration for an account. For full details, see
         https://docs.databricks.com/en/admin/account-settings/budgets.html.
-        
+
         :param budget: :class:`CreateBudgetConfigurationBudget`
           Properties of the new budget configuration.
-        
+
         :returns: :class:`CreateBudgetConfigurationResponse`
         """
         body = {}
-        if budget is not None: body['budget'] = budget.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if budget is not None:
+            body["budget"] = budget.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.1/accounts/{self._api.account_id}/budgets',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/accounts/{self._api.account_id}/budgets",
+            body=body,
+            headers=headers,
+        )
         return CreateBudgetConfigurationResponse.from_dict(res)
 
     def delete(self, budget_id: str):
         """Delete budget.
-        
+
         Deletes a budget configuration for an account. Both account and budget configuration are specified by
         ID. This cannot be undone.
-        
+
         :param budget_id: str
           The Databricks budget configuration ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}",
+            headers=headers,
+        )
 
     def get(self, budget_id: str) -> GetBudgetConfigurationResponse:
         """Get budget.
-        
+
         Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
-        
+
         :param budget_id: str
           The budget configuration ID
-        
+
         :returns: :class:`GetBudgetConfigurationResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}",
+            headers=headers,
+        )
         return GetBudgetConfigurationResponse.from_dict(res)
 
     def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]:
         """Get all budgets.
-        
+
         Gets all budgets associated with this account.
-        
+
         :param page_token: str (optional)
           A page token received from a previous get all budget configurations call. This token can be used to
           retrieve the subsequent page. Requests first page if absent.
-        
+
         :returns: Iterator over :class:`BudgetConfiguration`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/accounts/{self._api.account_id}/budgets',
-                                query=query,
-                                headers=headers)
-            if 'budgets' in json:
-                for v in json['budgets']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/accounts/{self._api.account_id}/budgets",
+                query=query,
+                headers=headers,
+            )
+            if "budgets" in json:
+                for v in json["budgets"]:
                     yield BudgetConfiguration.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
-    def update(self, budget_id: str,
-               budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse:
+    def update(self, budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse:
         """Modify budget.
-        
+
         Updates a budget configuration for an account. Both account and budget configuration are specified by
         ID.
-        
+
         :param budget_id: str
           The Databricks budget configuration ID.
         :param budget: :class:`UpdateBudgetConfigurationBudget`
           The updated budget. This will overwrite the budget specified by the budget ID.
-        
+
         :returns: :class:`UpdateBudgetConfigurationResponse`
         """
         body = {}
-        if budget is not None: body['budget'] = budget.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if budget is not None:
+            body["budget"] = budget.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateBudgetConfigurationResponse.from_dict(res)
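
Similarly for the budget-configuration service above: a minimal create/get/list sketch that omits the alert configurations and filters a real budget would normally carry, and assumes the service is exposed as a.budgets:

from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

created = a.budgets.create(
    budget=billing.CreateBudgetConfigurationBudget(display_name="example-budget")
)
got = a.budgets.get(budget_id=created.budget.budget_configuration_id)
for cfg in a.budgets.list():
    print(cfg.display_name)
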
 
 
@@ -1815,12 +2124,12 @@ class LogDeliveryAPI:
     """These APIs manage log delivery configurations for this account. The two supported log types for this API
     are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
     account ID types.
-    
+
     Log delivery works with all account types. However, if your account is on the E2 version of the platform
     or on a select custom plan that allows multiple workspaces per account, you can optionally configure
     different storage destinations for each workspace. Log delivery status is also provided to know the latest
     status of log delivery attempts. The high-level flow of billable usage delivery:
-    
+
     1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
     Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
     that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For
@@ -1834,7 +2143,7 @@ class LogDeliveryAPI:
     Account level log delivery applies to all current and future workspaces plus account level logs, while
     workspace level log delivery solely delivers logs related to the specified workspaces. You can create
     multiple types of delivery configurations per account.
-    
+
     For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
     delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
     `//billable-usage/csv/`, where `` is the name of the optional delivery path
@@ -1843,7 +2152,7 @@ class LogDeliveryAPI:
     workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
     _account level_ delivery configuration that delivers logs for all current and future workspaces in your
     account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
-    
+
     For audit log delivery: * For more information about audit log delivery, see [Audit log delivery],
     which includes information about the used JSON schema. * The delivery location is
     `//workspaceId=/date=/auditlogs_.json`.
@@ -1853,11 +2162,12 @@ class LogDeliveryAPI:
     level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
     workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
     Auditable events are typically available in logs within 15 minutes.
-    
+
     [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
     [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
     [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
-    [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html"""
+    [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
@@ -1865,105 +2175,126 @@ def __init__(self, api_client):
     def create(
         self,
         *,
-        log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None
+        log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None,
     ) -> WrappedLogDeliveryConfiguration:
         """Create a new log delivery configuration.
-        
+
         Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs
         to your storage location. This requires that you already created a [credential
         object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
         [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
-        
+
         For full details, including the required IAM role policies and bucket policies, see [Deliver and
         access billable usage logs] or [Configure audit logging].
-        
+
         **Note**: There is a limit on the number of log delivery configurations available per account (each
         limit applies separately to each log type including billable usage and audit logs). You can create a
         maximum of two enabled account-level delivery configurations (configurations without a workspace
         filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per
         workspace for each log type, which means that the same workspace ID can occur in the workspace filter
         for no more than two delivery configurations per log type.
-        
+
         You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log
         delivery configuration](:method:LogDelivery/PatchStatus)).
-        
+
         [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
         [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-        
+
         :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional)
-        
+
         :returns: :class:`WrappedLogDeliveryConfiguration`
         """
         body = {}
         if log_delivery_configuration is not None:
-            body['log_delivery_configuration'] = log_delivery_configuration.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["log_delivery_configuration"] = log_delivery_configuration.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/log-delivery',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/log-delivery",
+            body=body,
+            headers=headers,
+        )
         return WrappedLogDeliveryConfiguration.from_dict(res)
 
     def get(self, log_delivery_configuration_id: str) -> WrappedLogDeliveryConfiguration:
         """Get log delivery configuration.
-        
+
         Gets a Databricks log delivery configuration object for an account, both specified by ID.
-        
+
         :param log_delivery_configuration_id: str
           Databricks log delivery configuration ID
-        
+
         :returns: :class:`WrappedLogDeliveryConfiguration`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}",
+            headers=headers,
+        )
         return WrappedLogDeliveryConfiguration.from_dict(res)
 
-    def list(self,
-             *,
-             credentials_id: Optional[str] = None,
-             status: Optional[LogDeliveryConfigStatus] = None,
-             storage_configuration_id: Optional[str] = None) -> Iterator[LogDeliveryConfiguration]:
+    def list(
+        self,
+        *,
+        credentials_id: Optional[str] = None,
+        status: Optional[LogDeliveryConfigStatus] = None,
+        storage_configuration_id: Optional[str] = None,
+    ) -> Iterator[LogDeliveryConfiguration]:
         """Get all log delivery configurations.
-        
+
         Gets all Databricks log delivery configurations associated with an account specified by ID.
-        
+
         :param credentials_id: str (optional)
           Filter by credential configuration ID.
         :param status: :class:`LogDeliveryConfigStatus` (optional)
           Filter by status `ENABLED` or `DISABLED`.
         :param storage_configuration_id: str (optional)
           Filter by storage configuration ID.
-        
+
         :returns: Iterator over :class:`LogDeliveryConfiguration`
         """
 
         query = {}
-        if credentials_id is not None: query['credentials_id'] = credentials_id
-        if status is not None: query['status'] = status.value
-        if storage_configuration_id is not None: query['storage_configuration_id'] = storage_configuration_id
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET',
-                            f'/api/2.0/accounts/{self._api.account_id}/log-delivery',
-                            query=query,
-                            headers=headers)
+        if credentials_id is not None:
+            query["credentials_id"] = credentials_id
+        if status is not None:
+            query["status"] = status.value
+        if storage_configuration_id is not None:
+            query["storage_configuration_id"] = storage_configuration_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/log-delivery",
+            query=query,
+            headers=headers,
+        )
         parsed = WrappedLogDeliveryConfigurations.from_dict(json).log_delivery_configurations
         return parsed if parsed is not None else []
 
-    def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryConfigStatus):
+    def patch_status(
+        self,
+        log_delivery_configuration_id: str,
+        status: LogDeliveryConfigStatus,
+    ):
         """Enable or disable log delivery configuration.
-        
+
         Enables or disables a log delivery configuration. Deletion of delivery configurations is not
         supported, so disable log delivery configurations that are no longer needed. Note that you can't
         re-enable a delivery configuration if this would violate the delivery configuration limits described
         under [Create log delivery](:method:LogDelivery/Create).
-        
+
         :param log_delivery_configuration_id: str
           Databricks log delivery configuration ID
         :param status: :class:`LogDeliveryConfigStatus`
@@ -1971,17 +2302,23 @@ def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryCo
           to `ENABLED`. You can [enable or disable the
           configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is
           not supported, so disable a log delivery configuration that is no longer needed.
-        
-        
+
+
         """
         body = {}
-        if status is not None: body['status'] = status.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if status is not None:
+            body["status"] = status.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class UsageDashboardsAPI:
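
Pulling the LogDeliveryAPI methods above together: create a configuration and later disable it, since deletion is not supported. The credential and storage IDs below are placeholders for objects created via the Credentials and Storage APIs, and the config name is hypothetical:

from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

credentials_id = "<credential-configuration-id>"          # placeholder
storage_configuration_id = "<storage-configuration-id>"   # placeholder

created = a.log_delivery.create(
    log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
        config_name="billable-usage-logs",
        credentials_id=credentials_id,
        storage_configuration_id=storage_configuration_id,
        log_type=billing.LogType.BILLABLE_USAGE,
        output_format=billing.OutputFormat.CSV,
    )
)

# Configurations cannot be deleted, only disabled.
a.log_delivery.patch_status(
    log_delivery_configuration_id=created.log_delivery_configuration.config_id,
    status=billing.LogDeliveryConfigStatus.DISABLED,
)
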
@@ -1992,57 +2329,74 @@ class UsageDashboardsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               dashboard_type: Optional[UsageDashboardType] = None,
-               workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse:
+    def create(
+        self,
+        *,
+        dashboard_type: Optional[UsageDashboardType] = None,
+        workspace_id: Optional[int] = None,
+    ) -> CreateBillingUsageDashboardResponse:
         """Create new usage dashboard.
-        
+
         Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
-        
+
         :param dashboard_type: :class:`UsageDashboardType` (optional)
           Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
           dashboard shows usage data for all workspaces in the account.
         :param workspace_id: int (optional)
           The workspace ID of the workspace in which the usage dashboard is created.
-        
+
         :returns: :class:`CreateBillingUsageDashboardResponse`
         """
         body = {}
-        if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value
-        if workspace_id is not None: body['workspace_id'] = workspace_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if dashboard_type is not None:
+            body["dashboard_type"] = dashboard_type.value
+        if workspace_id is not None:
+            body["workspace_id"] = workspace_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/dashboard',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/dashboard",
+            body=body,
+            headers=headers,
+        )
         return CreateBillingUsageDashboardResponse.from_dict(res)
 
-    def get(self,
-            *,
-            dashboard_type: Optional[UsageDashboardType] = None,
-            workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse:
+    def get(
+        self,
+        *,
+        dashboard_type: Optional[UsageDashboardType] = None,
+        workspace_id: Optional[int] = None,
+    ) -> GetBillingUsageDashboardResponse:
         """Get usage dashboard.
-        
+
         Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
-        
+
         :param dashboard_type: :class:`UsageDashboardType` (optional)
           Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
           dashboard shows usage data for all workspaces in the account.
         :param workspace_id: int (optional)
           The workspace ID of the workspace in which the usage dashboard is created.
-        
+
         :returns: :class:`GetBillingUsageDashboardResponse`
         """
 
         query = {}
-        if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value
-        if workspace_id is not None: query['workspace_id'] = workspace_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/dashboard',
-                           query=query,
-                           headers=headers)
+        if dashboard_type is not None:
+            query["dashboard_type"] = dashboard_type.value
+        if workspace_id is not None:
+            query["workspace_id"] = workspace_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/dashboard",
+            query=query,
+            headers=headers,
+        )
         return GetBillingUsageDashboardResponse.from_dict(res)
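
And for the usage dashboard endpoints above, a sketch that assumes the service is exposed as a.usage_dashboards and that the get response carries a dashboard_id; the workspace ID is a placeholder:

from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

workspace_id = 1234567890  # placeholder workspace ID

a.usage_dashboards.create(
    dashboard_type=billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=workspace_id,
)
dash = a.usage_dashboards.get(
    dashboard_type=billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=workspace_id,
)
print(dash.dashboard_id)  # assumes the response exposes the created dashboard's ID
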
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 83d7de4e8..038dd6d42 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -25,19 +25,21 @@ class AccountsCreateMetastore:
     def as_dict(self) -> dict:
         """Serializes the AccountsCreateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastore:
         """Deserializes the AccountsCreateMetastore from a dictionary."""
-        return cls(metastore_info=_from_dict(d, 'metastore_info', CreateMetastore))
+        return cls(metastore_info=_from_dict(d, "metastore_info", CreateMetastore))
 
 
 @dataclass
@@ -53,25 +55,33 @@ class AccountsCreateMetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the AccountsCreateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict()
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastoreAssignment:
         """Deserializes the AccountsCreateMetastoreAssignment from a dictionary."""
-        return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', CreateMetastoreAssignment),
-                   metastore_id=d.get('metastore_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            metastore_assignment=_from_dict(d, "metastore_assignment", CreateMetastoreAssignment),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -84,22 +94,28 @@ class AccountsCreateStorageCredential:
     def as_dict(self) -> dict:
         """Serializes the AccountsCreateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info.as_dict()
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.credential_info:
+            body["credential_info"] = self.credential_info.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.credential_info:
+            body["credential_info"] = self.credential_info
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateStorageCredential:
         """Deserializes the AccountsCreateStorageCredential from a dictionary."""
-        return cls(credential_info=_from_dict(d, 'credential_info', CreateStorageCredential),
-                   metastore_id=d.get('metastore_id', None))
+        return cls(
+            credential_info=_from_dict(d, "credential_info", CreateStorageCredential),
+            metastore_id=d.get("metastore_id", None),
+        )
 
 
 @dataclass
@@ -109,19 +125,21 @@ class AccountsMetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the AccountsMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict()
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreAssignment:
         """Deserializes the AccountsMetastoreAssignment from a dictionary."""
-        return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', MetastoreAssignment))
+        return cls(metastore_assignment=_from_dict(d, "metastore_assignment", MetastoreAssignment))
 
 
 @dataclass
@@ -131,19 +149,21 @@ class AccountsMetastoreInfo:
     def as_dict(self) -> dict:
         """Serializes the AccountsMetastoreInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreInfo:
         """Deserializes the AccountsMetastoreInfo from a dictionary."""
-        return cls(metastore_info=_from_dict(d, 'metastore_info', MetastoreInfo))
+        return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo))
 
 
 @dataclass
@@ -153,19 +173,21 @@ class AccountsStorageCredentialInfo:
     def as_dict(self) -> dict:
         """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info.as_dict()
+        if self.credential_info:
+            body["credential_info"] = self.credential_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info
+        if self.credential_info:
+            body["credential_info"] = self.credential_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsStorageCredentialInfo:
         """Deserializes the AccountsStorageCredentialInfo from a dictionary."""
-        return cls(credential_info=_from_dict(d, 'credential_info', StorageCredentialInfo))
+        return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo))
 
 
 @dataclass
@@ -178,22 +200,28 @@ class AccountsUpdateMetastore:
     def as_dict(self) -> dict:
         """Serializes the AccountsUpdateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.metastore_info:
+            body["metastore_info"] = self.metastore_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastore:
         """Deserializes the AccountsUpdateMetastore from a dictionary."""
-        return cls(metastore_id=d.get('metastore_id', None),
-                   metastore_info=_from_dict(d, 'metastore_info', UpdateMetastore))
+        return cls(
+            metastore_id=d.get("metastore_id", None),
+            metastore_info=_from_dict(d, "metastore_info", UpdateMetastore),
+        )
 
 
 @dataclass
@@ -209,25 +237,33 @@ class AccountsUpdateMetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the AccountsUpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict()
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.metastore_assignment:
+            body["metastore_assignment"] = self.metastore_assignment
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastoreAssignment:
         """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary."""
-        return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', UpdateMetastoreAssignment),
-                   metastore_id=d.get('metastore_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            metastore_assignment=_from_dict(d, "metastore_assignment", UpdateMetastoreAssignment),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -243,27 +279,33 @@ class AccountsUpdateStorageCredential:
     def as_dict(self) -> dict:
         """Serializes the AccountsUpdateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info.as_dict()
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.credential_info:
+            body["credential_info"] = self.credential_info.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
+            body["storage_credential_name"] = self.storage_credential_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_info: body['credential_info'] = self.credential_info
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.credential_info:
+            body["credential_info"] = self.credential_info
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
+            body["storage_credential_name"] = self.storage_credential_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateStorageCredential:
         """Deserializes the AccountsUpdateStorageCredential from a dictionary."""
-        return cls(credential_info=_from_dict(d, 'credential_info', UpdateStorageCredential),
-                   metastore_id=d.get('metastore_id', None),
-                   storage_credential_name=d.get('storage_credential_name', None))
+        return cls(
+            credential_info=_from_dict(d, "credential_info", UpdateStorageCredential),
+            metastore_id=d.get("metastore_id", None),
+            storage_credential_name=d.get("storage_credential_name", None),
+        )
 
 
 @dataclass
@@ -283,28 +325,38 @@ class ArtifactAllowlistInfo:
     def as_dict(self) -> dict:
         """Serializes the ArtifactAllowlistInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers]
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.artifact_matchers:
+            body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.artifact_matchers:
+            body["artifact_matchers"] = self.artifact_matchers
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactAllowlistInfo:
         """Deserializes the ArtifactAllowlistInfo from a dictionary."""
-        return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   metastore_id=d.get('metastore_id', None))
+        return cls(
+            artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            metastore_id=d.get("metastore_id", None),
+        )
 
 
 @dataclass
@@ -318,29 +370,36 @@ class ArtifactMatcher:
     def as_dict(self) -> dict:
         """Serializes the ArtifactMatcher into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact is not None: body['artifact'] = self.artifact
-        if self.match_type is not None: body['match_type'] = self.match_type.value
+        if self.artifact is not None:
+            body["artifact"] = self.artifact
+        if self.match_type is not None:
+            body["match_type"] = self.match_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact is not None: body['artifact'] = self.artifact
-        if self.match_type is not None: body['match_type'] = self.match_type
+        if self.artifact is not None:
+            body["artifact"] = self.artifact
+        if self.match_type is not None:
+            body["match_type"] = self.match_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactMatcher:
         """Deserializes the ArtifactMatcher from a dictionary."""
-        return cls(artifact=d.get('artifact', None), match_type=_enum(d, 'match_type', MatchType))
+        return cls(
+            artifact=d.get("artifact", None),
+            match_type=_enum(d, "match_type", MatchType),
+        )
 
 
 class ArtifactType(Enum):
     """The artifact type"""
 
-    INIT_SCRIPT = 'INIT_SCRIPT'
-    LIBRARY_JAR = 'LIBRARY_JAR'
-    LIBRARY_MAVEN = 'LIBRARY_MAVEN'
+    INIT_SCRIPT = "INIT_SCRIPT"
+    LIBRARY_JAR = "LIBRARY_JAR"
+    LIBRARY_MAVEN = "LIBRARY_MAVEN"
 
 
 @dataclass
@@ -365,7 +424,8 @@ def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
 @dataclass
 class AwsCredentials:
     """AWS temporary credentials for API authentication. Read more at
-    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.
+    """
 
     access_key_id: Optional[str] = None
     """The access key ID that identifies the temporary credentials."""
@@ -383,28 +443,38 @@ class AwsCredentials:
     def as_dict(self) -> dict:
         """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
-        if self.session_token is not None: body['session_token'] = self.session_token
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
+        if self.session_token is not None:
+            body["session_token"] = self.session_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
-        if self.session_token is not None: body['session_token'] = self.session_token
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
+        if self.session_token is not None:
+            body["session_token"] = self.session_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
-        return cls(access_key_id=d.get('access_key_id', None),
-                   access_point=d.get('access_point', None),
-                   secret_access_key=d.get('secret_access_key', None),
-                   session_token=d.get('session_token', None))
+        return cls(
+            access_key_id=d.get("access_key_id", None),
+            access_point=d.get("access_point", None),
+            secret_access_key=d.get("secret_access_key", None),
+            session_token=d.get("session_token", None),
+        )
 
 
 @dataclass
@@ -424,25 +494,33 @@ class AwsIamRole:
     def as_dict(self) -> dict:
         """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
-        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
+        if self.unity_catalog_iam_arn is not None:
+            body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
-        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
+        if self.unity_catalog_iam_arn is not None:
+            body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRole:
         """Deserializes the AwsIamRole from a dictionary."""
-        return cls(external_id=d.get('external_id', None),
-                   role_arn=d.get('role_arn', None),
-                   unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
+        return cls(
+            external_id=d.get("external_id", None),
+            role_arn=d.get("role_arn", None),
+            unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None),
+        )
 
 
 @dataclass
@@ -453,19 +531,21 @@ class AwsIamRoleRequest:
     def as_dict(self) -> dict:
         """Serializes the AwsIamRoleRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleRequest:
         """Deserializes the AwsIamRoleRequest from a dictionary."""
-        return cls(role_arn=d.get('role_arn', None))
+        return cls(role_arn=d.get("role_arn", None))
 
 
 @dataclass
@@ -483,32 +563,41 @@ class AwsIamRoleResponse:
     def as_dict(self) -> dict:
         """Serializes the AwsIamRoleResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
-        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
+        if self.unity_catalog_iam_arn is not None:
+            body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
-        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
+        if self.unity_catalog_iam_arn is not None:
+            body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleResponse:
         """Deserializes the AwsIamRoleResponse from a dictionary."""
-        return cls(external_id=d.get('external_id', None),
-                   role_arn=d.get('role_arn', None),
-                   unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
+        return cls(
+            external_id=d.get("external_id", None),
+            role_arn=d.get("role_arn", None),
+            unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None),
+        )
 
 
 @dataclass
 class AzureActiveDirectoryToken:
     """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
     Identity. Read more at
-    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token
+    """
 
     aad_token: Optional[str] = None
     """Opaque token that contains claims that you can use in Azure Active Directory to access cloud
@@ -517,19 +606,21 @@ class AzureActiveDirectoryToken:
     def as_dict(self) -> dict:
         """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        if self.aad_token is not None:
+            body["aad_token"] = self.aad_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        if self.aad_token is not None:
+            body["aad_token"] = self.aad_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
         """Deserializes the AzureActiveDirectoryToken from a dictionary."""
-        return cls(aad_token=d.get('aad_token', None))
+        return cls(aad_token=d.get("aad_token", None))
 
 
 @dataclass
@@ -555,25 +646,33 @@ class AzureManagedIdentity:
     def as_dict(self) -> dict:
         """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentity:
         """Deserializes the AzureManagedIdentity from a dictionary."""
-        return cls(access_connector_id=d.get('access_connector_id', None),
-                   credential_id=d.get('credential_id', None),
-                   managed_identity_id=d.get('managed_identity_id', None))
+        return cls(
+            access_connector_id=d.get("access_connector_id", None),
+            credential_id=d.get("credential_id", None),
+            managed_identity_id=d.get("managed_identity_id", None),
+        )
 
 
 @dataclass
@@ -592,22 +691,28 @@ class AzureManagedIdentityRequest:
     def as_dict(self) -> dict:
         """Serializes the AzureManagedIdentityRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureManagedIdentityRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityRequest:
         """Deserializes the AzureManagedIdentityRequest from a dictionary."""
-        return cls(access_connector_id=d.get('access_connector_id', None),
-                   managed_identity_id=d.get('managed_identity_id', None))
+        return cls(
+            access_connector_id=d.get("access_connector_id", None),
+            managed_identity_id=d.get("managed_identity_id", None),
+        )
 
 
 @dataclass
@@ -629,25 +734,33 @@ class AzureManagedIdentityResponse:
     def as_dict(self) -> dict:
         """Serializes the AzureManagedIdentityResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureManagedIdentityResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        if self.access_connector_id is not None:
+            body["access_connector_id"] = self.access_connector_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.managed_identity_id is not None:
+            body["managed_identity_id"] = self.managed_identity_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
         """Deserializes the AzureManagedIdentityResponse from a dictionary."""
-        return cls(access_connector_id=d.get('access_connector_id', None),
-                   credential_id=d.get('credential_id', None),
-                   managed_identity_id=d.get('managed_identity_id', None))
+        return cls(
+            access_connector_id=d.get("access_connector_id", None),
+            credential_id=d.get("credential_id", None),
+            managed_identity_id=d.get("managed_identity_id", None),
+        )
 
 
 @dataclass
@@ -666,31 +779,40 @@ class AzureServicePrincipal:
     def as_dict(self) -> dict:
         """Serializes the AzureServicePrincipal into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.application_id is not None: body['application_id'] = self.application_id
-        if self.client_secret is not None: body['client_secret'] = self.client_secret
-        if self.directory_id is not None: body['directory_id'] = self.directory_id
+        if self.application_id is not None:
+            body["application_id"] = self.application_id
+        if self.client_secret is not None:
+            body["client_secret"] = self.client_secret
+        if self.directory_id is not None:
+            body["directory_id"] = self.directory_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.application_id is not None: body['application_id'] = self.application_id
-        if self.client_secret is not None: body['client_secret'] = self.client_secret
-        if self.directory_id is not None: body['directory_id'] = self.directory_id
+        if self.application_id is not None:
+            body["application_id"] = self.application_id
+        if self.client_secret is not None:
+            body["client_secret"] = self.client_secret
+        if self.directory_id is not None:
+            body["directory_id"] = self.directory_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal:
         """Deserializes the AzureServicePrincipal from a dictionary."""
-        return cls(application_id=d.get('application_id', None),
-                   client_secret=d.get('client_secret', None),
-                   directory_id=d.get('directory_id', None))
+        return cls(
+            application_id=d.get("application_id", None),
+            client_secret=d.get("client_secret", None),
+            directory_id=d.get("directory_id", None),
+        )
 
 
 @dataclass
 class AzureUserDelegationSas:
     """Azure temporary credentials for API authentication. Read more at
-    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
+    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
+    """
 
     sas_token: Optional[str] = None
     """The signed URI (SAS Token) used to access blob services for a given path"""
@@ -698,19 +820,21 @@ class AzureUserDelegationSas:
     def as_dict(self) -> dict:
         """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        if self.sas_token is not None:
+            body["sas_token"] = self.sas_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        if self.sas_token is not None:
+            body["sas_token"] = self.sas_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
         """Deserializes the AzureUserDelegationSas from a dictionary."""
-        return cls(sas_token=d.get('sas_token', None))
+        return cls(sas_token=d.get("sas_token", None))
 
 
 @dataclass
@@ -807,108 +931,156 @@ class CatalogInfo:
     def as_dict(self) -> dict:
         """Serializes the CatalogInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type.value
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type.value
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.effective_predictive_optimization_flag:
-            body[
-                'effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict(
-                )
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info.as_dict()
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.effective_predictive_optimization_flag:
-            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
         """Deserializes the CatalogInfo from a dictionary."""
-        return cls(browse_only=d.get('browse_only', None),
-                   catalog_type=_enum(d, 'catalog_type', CatalogType),
-                   comment=d.get('comment', None),
-                   connection_name=d.get('connection_name', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   effective_predictive_optimization_flag=_from_dict(
-                       d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag),
-                   enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
-                                                        EnablePredictiveOptimization),
-                   full_name=d.get('full_name', None),
-                   isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   owner=d.get('owner', None),
-                   properties=d.get('properties', None),
-                   provider_name=d.get('provider_name', None),
-                   provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
-                   securable_type=d.get('securable_type', None),
-                   share_name=d.get('share_name', None),
-                   storage_location=d.get('storage_location', None),
-                   storage_root=d.get('storage_root', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            browse_only=d.get("browse_only", None),
+            catalog_type=_enum(d, "catalog_type", CatalogType),
+            comment=d.get("comment", None),
+            connection_name=d.get("connection_name", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            effective_predictive_optimization_flag=_from_dict(
+                d,
+                "effective_predictive_optimization_flag",
+                EffectivePredictiveOptimizationFlag,
+            ),
+            enable_predictive_optimization=_enum(
+                d,
+                "enable_predictive_optimization",
+                EnablePredictiveOptimization,
+            ),
+            full_name=d.get("full_name", None),
+            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+            provider_name=d.get("provider_name", None),
+            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
+            securable_type=d.get("securable_type", None),
+            share_name=d.get("share_name", None),
+            storage_location=d.get("storage_location", None),
+            storage_root=d.get("storage_root", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 class CatalogIsolationMode(Enum):
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
-    ISOLATED = 'ISOLATED'
-    OPEN = 'OPEN'
+    ISOLATED = "ISOLATED"
+    OPEN = "OPEN"
 
 
 class CatalogType(Enum):
     """The type of the catalog."""
 
-    DELTASHARING_CATALOG = 'DELTASHARING_CATALOG'
-    MANAGED_CATALOG = 'MANAGED_CATALOG'
-    SYSTEM_CATALOG = 'SYSTEM_CATALOG'
+    DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
+    MANAGED_CATALOG = "MANAGED_CATALOG"
+    SYSTEM_CATALOG = "SYSTEM_CATALOG"
 
 
 @dataclass
@@ -925,25 +1097,33 @@ class CloudflareApiToken:
     def as_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudflareApiToken:
         """Deserializes the CloudflareApiToken from a dictionary."""
-        return cls(access_key_id=d.get('access_key_id', None),
-                   account_id=d.get('account_id', None),
-                   secret_access_key=d.get('secret_access_key', None))
+        return cls(
+            access_key_id=d.get("access_key_id", None),
+            account_id=d.get("account_id", None),
+            secret_access_key=d.get("secret_access_key", None),
+        )
 
 
 @dataclass
@@ -985,52 +1165,78 @@ class ColumnInfo:
     def as_dict(self) -> dict:
         """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.mask:
+            body["mask"] = self.mask.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.nullable is not None:
+            body["nullable"] = self.nullable
+        if self.partition_index is not None:
+            body["partition_index"] = self.partition_index
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name.value
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.mask:
+            body["mask"] = self.mask
+        if self.name is not None:
+            body["name"] = self.name
+        if self.nullable is not None:
+            body["nullable"] = self.nullable
+        if self.partition_index is not None:
+            body["partition_index"] = self.partition_index
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   mask=_from_dict(d, 'mask', ColumnMask),
-                   name=d.get('name', None),
-                   nullable=d.get('nullable', None),
-                   partition_index=d.get('partition_index', None),
-                   position=d.get('position', None),
-                   type_interval_type=d.get('type_interval_type', None),
-                   type_json=d.get('type_json', None),
-                   type_name=_enum(d, 'type_name', ColumnTypeName),
-                   type_precision=d.get('type_precision', None),
-                   type_scale=d.get('type_scale', None),
-                   type_text=d.get('type_text', None))
+        return cls(
+            comment=d.get("comment", None),
+            mask=_from_dict(d, "mask", ColumnMask),
+            name=d.get("name", None),
+            nullable=d.get("nullable", None),
+            partition_index=d.get("partition_index", None),
+            position=d.get("position", None),
+            type_interval_type=d.get("type_interval_type", None),
+            type_json=d.get("type_json", None),
+            type_name=_enum(d, "type_name", ColumnTypeName),
+            type_precision=d.get("type_precision", None),
+            type_scale=d.get("type_scale", None),
+            type_text=d.get("type_text", None),
+        )
 
 
 @dataclass
@@ -1046,48 +1252,54 @@ class ColumnMask:
     def as_dict(self) -> dict:
         """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.using_column_names:
+            body["using_column_names"] = [v for v in self.using_column_names]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnMask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = self.using_column_names
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.using_column_names:
+            body["using_column_names"] = self.using_column_names
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
         """Deserializes the ColumnMask from a dictionary."""
-        return cls(function_name=d.get('function_name', None),
-                   using_column_names=d.get('using_column_names', None))
+        return cls(
+            function_name=d.get("function_name", None),
+            using_column_names=d.get("using_column_names", None),
+        )
 
 
 class ColumnTypeName(Enum):
 
-    ARRAY = 'ARRAY'
-    BINARY = 'BINARY'
-    BOOLEAN = 'BOOLEAN'
-    BYTE = 'BYTE'
-    CHAR = 'CHAR'
-    DATE = 'DATE'
-    DECIMAL = 'DECIMAL'
-    DOUBLE = 'DOUBLE'
-    FLOAT = 'FLOAT'
-    INT = 'INT'
-    INTERVAL = 'INTERVAL'
-    LONG = 'LONG'
-    MAP = 'MAP'
-    NULL = 'NULL'
-    SHORT = 'SHORT'
-    STRING = 'STRING'
-    STRUCT = 'STRUCT'
-    TABLE_TYPE = 'TABLE_TYPE'
-    TIMESTAMP = 'TIMESTAMP'
-    TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
-    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
-    VARIANT = 'VARIANT'
+    ARRAY = "ARRAY"
+    BINARY = "BINARY"
+    BOOLEAN = "BOOLEAN"
+    BYTE = "BYTE"
+    CHAR = "CHAR"
+    DATE = "DATE"
+    DECIMAL = "DECIMAL"
+    DOUBLE = "DOUBLE"
+    FLOAT = "FLOAT"
+    INT = "INT"
+    INTERVAL = "INTERVAL"
+    LONG = "LONG"
+    MAP = "MAP"
+    NULL = "NULL"
+    SHORT = "SHORT"
+    STRING = "STRING"
+    STRUCT = "STRUCT"
+    TABLE_TYPE = "TABLE_TYPE"
+    TIMESTAMP = "TIMESTAMP"
+    TIMESTAMP_NTZ = "TIMESTAMP_NTZ"
+    USER_DEFINED_TYPE = "USER_DEFINED_TYPE"
+    VARIANT = "VARIANT"
 
 
 @dataclass
@@ -1148,86 +1360,124 @@ class ConnectionInfo:
     def as_dict(self) -> dict:
         """Serializes the ConnectionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_type is not None: body['credential_type'] = self.credential_type.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type.value
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_type is not None:
+            body["credential_type"] = self.credential_type.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info.as_dict()
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_type is not None: body['connection_type'] = self.connection_type
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_type is not None: body['credential_type'] = self.credential_type
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_type is not None:
+            body["credential_type"] = self.credential_type
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConnectionInfo:
         """Deserializes the ConnectionInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   connection_id=d.get('connection_id', None),
-                   connection_type=_enum(d, 'connection_type', ConnectionType),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   credential_type=_enum(d, 'credential_type', CredentialType),
-                   full_name=d.get('full_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   owner=d.get('owner', None),
-                   properties=d.get('properties', None),
-                   provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
-                   read_only=d.get('read_only', None),
-                   securable_type=d.get('securable_type', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   url=d.get('url', None))
+        return cls(
+            comment=d.get("comment", None),
+            connection_id=d.get("connection_id", None),
+            connection_type=_enum(d, "connection_type", ConnectionType),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            credential_type=_enum(d, "credential_type", CredentialType),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
+            read_only=d.get("read_only", None),
+            securable_type=d.get("securable_type", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            url=d.get("url", None),
+        )
 
 
 class ConnectionType(Enum):
     """The type of connection."""
 
-    BIGQUERY = 'BIGQUERY'
-    DATABRICKS = 'DATABRICKS'
-    GLUE = 'GLUE'
-    HIVE_METASTORE = 'HIVE_METASTORE'
-    HTTP = 'HTTP'
-    MYSQL = 'MYSQL'
-    POSTGRESQL = 'POSTGRESQL'
-    REDSHIFT = 'REDSHIFT'
-    SNOWFLAKE = 'SNOWFLAKE'
-    SQLDW = 'SQLDW'
-    SQLSERVER = 'SQLSERVER'
+    BIGQUERY = "BIGQUERY"
+    DATABRICKS = "DATABRICKS"
+    GLUE = "GLUE"
+    HIVE_METASTORE = "HIVE_METASTORE"
+    HTTP = "HTTP"
+    MYSQL = "MYSQL"
+    POSTGRESQL = "POSTGRESQL"
+    REDSHIFT = "REDSHIFT"
+    SNOWFLAKE = "SNOWFLAKE"
+    SQLDW = "SQLDW"
+    SQLSERVER = "SQLSERVER"
 
 
 @dataclass
@@ -1250,29 +1500,32 @@ def as_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.initial_pipeline_sync_progress:
-            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict()
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.initial_pipeline_sync_progress:
-            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContinuousUpdateStatus:
         """Deserializes the ContinuousUpdateStatus from a dictionary."""
-        return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress',
-                                                             PipelineProgress),
-                   last_processed_commit_version=d.get('last_processed_commit_version', None),
-                   timestamp=d.get('timestamp', None))
+        return cls(
+            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress),
+            last_processed_commit_version=d.get("last_processed_commit_version", None),
+            timestamp=d.get("timestamp", None),
+        )
 
 
 @dataclass
@@ -1306,40 +1559,58 @@ class CreateCatalog:
     def as_dict(self) -> dict:
         """Serializes the CreateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCatalog:
         """Deserializes the CreateCatalog from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   connection_name=d.get('connection_name', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   properties=d.get('properties', None),
-                   provider_name=d.get('provider_name', None),
-                   share_name=d.get('share_name', None),
-                   storage_root=d.get('storage_root', None))
+        return cls(
+            comment=d.get("comment", None),
+            connection_name=d.get("connection_name", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            properties=d.get("properties", None),
+            provider_name=d.get("provider_name", None),
+            share_name=d.get("share_name", None),
+            storage_root=d.get("storage_root", None),
+        )
 
 
 @dataclass
@@ -1365,34 +1636,48 @@ class CreateConnection:
     def as_dict(self) -> dict:
         """Serializes the CreateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateConnection into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_type is not None: body['connection_type'] = self.connection_type
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
         """Deserializes the CreateConnection from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   connection_type=_enum(d, 'connection_type', ConnectionType),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   properties=d.get('properties', None),
-                   read_only=d.get('read_only', None))
+        return cls(
+            comment=d.get("comment", None),
+            connection_type=_enum(d, "connection_type", ConnectionType),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            properties=d.get("properties", None),
+            read_only=d.get("read_only", None),
+        )
 
 
 @dataclass
@@ -1429,47 +1714,67 @@ class CreateCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.purpose is not None: body['purpose'] = self.purpose.value
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose.value
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.comment is not None: body['comment'] = self.comment
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.name is not None: body['name'] = self.name
-        if self.purpose is not None: body['purpose'] = self.purpose
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.name is not None:
+            body["name"] = self.name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   comment=d.get('comment', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccount),
-                   name=d.get('name', None),
-                   purpose=_enum(d, 'purpose', CredentialPurpose),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccount,
+            ),
+            name=d.get("name", None),
+            purpose=_enum(d, "purpose", CredentialPurpose),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
 
 
 @dataclass
@@ -1506,43 +1811,63 @@ class CreateExternalLocation:
     def as_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
-        return cls(access_point=d.get('access_point', None),
-                   comment=d.get('comment', None),
-                   credential_name=d.get('credential_name', None),
-                   encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
-                   fallback=d.get('fallback', None),
-                   name=d.get('name', None),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None),
-                   url=d.get('url', None))
+        return cls(
+            access_point=d.get("access_point", None),
+            comment=d.get("comment", None),
+            credential_name=d.get("credential_name", None),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            fallback=d.get("fallback", None),
+            name=d.get("name", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -1615,85 +1940,129 @@ class CreateFunction:
     def as_dict(self) -> dict:
         """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type.value
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.input_params: body['input_params'] = self.input_params.as_dict()
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params.as_dict()
-        if self.routine_body is not None: body['routine_body'] = self.routine_body.value
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type.value
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type.value
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.input_params:
+            body["input_params"] = self.input_params.as_dict()
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style.value
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params.as_dict()
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body.value
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies.as_dict()
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type.value
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access.value
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunction into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.input_params: body['input_params'] = self.input_params
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params
-        if self.routine_body is not None: body['routine_body'] = self.routine_body
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.input_params:
+            body["input_params"] = self.input_params
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunction:
         """Deserializes the CreateFunction from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   data_type=_enum(d, 'data_type', ColumnTypeName),
-                   external_language=d.get('external_language', None),
-                   external_name=d.get('external_name', None),
-                   full_data_type=d.get('full_data_type', None),
-                   input_params=_from_dict(d, 'input_params', FunctionParameterInfos),
-                   is_deterministic=d.get('is_deterministic', None),
-                   is_null_call=d.get('is_null_call', None),
-                   name=d.get('name', None),
-                   parameter_style=_enum(d, 'parameter_style', CreateFunctionParameterStyle),
-                   properties=d.get('properties', None),
-                   return_params=_from_dict(d, 'return_params', FunctionParameterInfos),
-                   routine_body=_enum(d, 'routine_body', CreateFunctionRoutineBody),
-                   routine_definition=d.get('routine_definition', None),
-                   routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList),
-                   schema_name=d.get('schema_name', None),
-                   security_type=_enum(d, 'security_type', CreateFunctionSecurityType),
-                   specific_name=d.get('specific_name', None),
-                   sql_data_access=_enum(d, 'sql_data_access', CreateFunctionSqlDataAccess),
-                   sql_path=d.get('sql_path', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            data_type=_enum(d, "data_type", ColumnTypeName),
+            external_language=d.get("external_language", None),
+            external_name=d.get("external_name", None),
+            full_data_type=d.get("full_data_type", None),
+            input_params=_from_dict(d, "input_params", FunctionParameterInfos),
+            is_deterministic=d.get("is_deterministic", None),
+            is_null_call=d.get("is_null_call", None),
+            name=d.get("name", None),
+            parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle),
+            properties=d.get("properties", None),
+            return_params=_from_dict(d, "return_params", FunctionParameterInfos),
+            routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody),
+            routine_definition=d.get("routine_definition", None),
+            routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList),
+            schema_name=d.get("schema_name", None),
+            security_type=_enum(d, "security_type", CreateFunctionSecurityType),
+            specific_name=d.get("specific_name", None),
+            sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess),
+            sql_path=d.get("sql_path", None),
+        )
 
 
 class CreateFunctionParameterStyle(Enum):
     """Function parameter style. **S** is the value for SQL."""
 
-    S = 'S'
+    S = "S"
 
 
 @dataclass
@@ -1704,19 +2073,21 @@ class CreateFunctionRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_info: body['function_info'] = self.function_info.as_dict()
+        if self.function_info:
+            body["function_info"] = self.function_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_info: body['function_info'] = self.function_info
+        if self.function_info:
+            body["function_info"] = self.function_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunctionRequest:
         """Deserializes the CreateFunctionRequest from a dictionary."""
-        return cls(function_info=_from_dict(d, 'function_info', CreateFunction))
+        return cls(function_info=_from_dict(d, "function_info", CreateFunction))
 
 
 class CreateFunctionRoutineBody(Enum):
@@ -1725,22 +2096,22 @@ class CreateFunctionRoutineBody(Enum):
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
 
-    EXTERNAL = 'EXTERNAL'
-    SQL = 'SQL'
+    EXTERNAL = "EXTERNAL"
+    SQL = "SQL"
 
 
 class CreateFunctionSecurityType(Enum):
     """The security type of the function."""
 
-    DEFINER = 'DEFINER'
+    DEFINER = "DEFINER"
 
 
 class CreateFunctionSqlDataAccess(Enum):
     """Function SQL data access."""
 
-    CONTAINS_SQL = 'CONTAINS_SQL'
-    NO_SQL = 'NO_SQL'
-    READS_SQL_DATA = 'READS_SQL_DATA'
+    CONTAINS_SQL = "CONTAINS_SQL"
+    NO_SQL = "NO_SQL"
+    READS_SQL_DATA = "READS_SQL_DATA"
 
 
 @dataclass
@@ -1759,25 +2130,33 @@ class CreateMetastore:
     def as_dict(self) -> dict:
         """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastore:
         """Deserializes the CreateMetastore from a dictionary."""
-        return cls(name=d.get('name', None),
-                   region=d.get('region', None),
-                   storage_root=d.get('storage_root', None))
+        return cls(
+            name=d.get("name", None),
+            region=d.get("region", None),
+            storage_root=d.get("storage_root", None),
+        )
 
 
 @dataclass
@@ -1795,25 +2174,33 @@ class CreateMetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastoreAssignment:
         """Deserializes the CreateMetastoreAssignment from a dictionary."""
-        return cls(default_catalog_name=d.get('default_catalog_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            default_catalog_name=d.get("default_catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -1869,63 +2256,92 @@ class CreateMonitor:
     def as_dict(self) -> dict:
         """Serializes the CreateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config.as_dict()
-        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
-        if self.notifications: body['notifications'] = self.notifications.as_dict()
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
+            body["data_classification_config"] = self.data_classification_config.as_dict()
+        if self.inference_log:
+            body["inference_log"] = self.inference_log.as_dict()
+        if self.notifications:
+            body["notifications"] = self.notifications.as_dict()
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
         if self.skip_builtin_dashboard is not None:
-            body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
-        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series.as_dict()
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
+        if self.slicing_exprs:
+            body["slicing_exprs"] = [v for v in self.slicing_exprs]
+        if self.snapshot:
+            body["snapshot"] = self.snapshot.as_dict()
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series.as_dict()
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = self.custom_metrics
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config
-        if self.inference_log: body['inference_log'] = self.inference_log
-        if self.notifications: body['notifications'] = self.notifications
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule
+            body["data_classification_config"] = self.data_classification_config
+        if self.inference_log:
+            body["inference_log"] = self.inference_log
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule
         if self.skip_builtin_dashboard is not None:
-            body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
-        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
-        if self.snapshot: body['snapshot'] = self.snapshot
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
+        if self.slicing_exprs:
+            body["slicing_exprs"] = self.slicing_exprs
+        if self.snapshot:
+            body["snapshot"] = self.snapshot
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
         """Deserializes the CreateMonitor from a dictionary."""
-        return cls(assets_dir=d.get('assets_dir', None),
-                   baseline_table_name=d.get('baseline_table_name', None),
-                   custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric),
-                   data_classification_config=_from_dict(d, 'data_classification_config',
-                                                         MonitorDataClassificationConfig),
-                   inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog),
-                   notifications=_from_dict(d, 'notifications', MonitorNotifications),
-                   output_schema_name=d.get('output_schema_name', None),
-                   schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
-                   skip_builtin_dashboard=d.get('skip_builtin_dashboard', None),
-                   slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshot),
-                   table_name=d.get('table_name', None),
-                   time_series=_from_dict(d, 'time_series', MonitorTimeSeries),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            assets_dir=d.get("assets_dir", None),
+            baseline_table_name=d.get("baseline_table_name", None),
+            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
+            data_classification_config=_from_dict(
+                d,
+                "data_classification_config",
+                MonitorDataClassificationConfig,
+            ),
+            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
+            notifications=_from_dict(d, "notifications", MonitorNotifications),
+            output_schema_name=d.get("output_schema_name", None),
+            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
+            skip_builtin_dashboard=d.get("skip_builtin_dashboard", None),
+            slicing_exprs=d.get("slicing_exprs", None),
+            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
+            table_name=d.get("table_name", None),
+            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -1948,31 +2364,43 @@ class CreateRegisteredModelRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegisteredModelRequest:
         """Deserializes the CreateRegisteredModelRequest from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   schema_name=d.get('schema_name', None),
-                   storage_location=d.get('storage_location', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+        )
 
 
 @dataclass
@@ -2014,31 +2442,43 @@ class CreateSchema:
     def as_dict(self) -> dict:
         """Serializes the CreateSchema into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.properties: body['properties'] = self.properties
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties:
+            body["properties"] = self.properties
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateSchema into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.properties: body['properties'] = self.properties
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties:
+            body["properties"] = self.properties
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateSchema:
         """Deserializes the CreateSchema from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   properties=d.get('properties', None),
-                   storage_root=d.get('storage_root', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            properties=d.get("properties", None),
+            storage_root=d.get("storage_root", None),
+        )
 
 
 @dataclass
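
A minimal round-trip sketch of the serialization pattern reformatted above, using CreateSchema from the preceding hunk; the field values are illustrative only:

    from databricks.sdk.service.catalog import CreateSchema

    req = CreateSchema(name="silver", catalog_name="main", comment="curated layer")
    body = req.as_dict()
    # Only fields that are set are emitted in the request body.
    assert body == {"catalog_name": "main", "comment": "curated layer", "name": "silver"}

    restored = CreateSchema.from_dict(body)
    # Keys absent from the payload come back as None.
    assert restored.name == "silver" and restored.storage_root is None
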
@@ -2073,48 +2513,67 @@ class CreateStorageCredential:
     def as_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
-        if self.comment is not None: body['comment'] = self.comment
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageCredential:
         """Deserializes the CreateStorageCredential from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity',
-                                                     AzureManagedIdentityRequest),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   comment=d.get('comment', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccountRequest),
-                   name=d.get('name', None),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccountRequest,
+            ),
+            name=d.get("name", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
 
 
 @dataclass
@@ -2129,22 +2588,28 @@ class CreateTableConstraint:
     def as_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.constraint: body['constraint'] = self.constraint.as_dict()
-        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        if self.constraint:
+            body["constraint"] = self.constraint.as_dict()
+        if self.full_name_arg is not None:
+            body["full_name_arg"] = self.full_name_arg
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.constraint: body['constraint'] = self.constraint
-        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        if self.constraint:
+            body["constraint"] = self.constraint
+        if self.full_name_arg is not None:
+            body["full_name_arg"] = self.full_name_arg
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTableConstraint:
         """Deserializes the CreateTableConstraint from a dictionary."""
-        return cls(constraint=_from_dict(d, 'constraint', TableConstraint),
-                   full_name_arg=d.get('full_name_arg', None))
+        return cls(
+            constraint=_from_dict(d, "constraint", TableConstraint),
+            full_name_arg=d.get("full_name_arg", None),
+        )
 
 
 @dataclass
@@ -2169,34 +2634,48 @@ class CreateVolumeRequestContent:
     def as_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.volume_type is not None: body['volume_type'] = self.volume_type.value
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
         """Deserializes the CreateVolumeRequestContent from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   schema_name=d.get('schema_name', None),
-                   storage_location=d.get('storage_location', None),
-                   volume_type=_enum(d, 'volume_type', VolumeType))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+            volume_type=_enum(d, "volume_type", VolumeType),
+        )
 
 
 @dataclass
@@ -2261,89 +2740,125 @@ class CredentialInfo:
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.purpose is not None: body['purpose'] = self.purpose.value
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.purpose is not None:
+            body["purpose"] = self.purpose.value
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         if self.used_for_managed_storage is not None:
-            body['used_for_managed_storage'] = self.used_for_managed_storage
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.purpose is not None: body['purpose'] = self.purpose
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.purpose is not None:
+            body["purpose"] = self.purpose
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         if self.used_for_managed_storage is not None:
-            body['used_for_managed_storage'] = self.used_for_managed_storage
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccount),
-                   full_name=d.get('full_name', None),
-                   id=d.get('id', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   purpose=_enum(d, 'purpose', CredentialPurpose),
-                   read_only=d.get('read_only', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   used_for_managed_storage=d.get('used_for_managed_storage', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccount,
+            ),
+            full_name=d.get("full_name", None),
+            id=d.get("id", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            purpose=_enum(d, "purpose", CredentialPurpose),
+            read_only=d.get("read_only", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            used_for_managed_storage=d.get("used_for_managed_storage", None),
+        )
 
 
 class CredentialPurpose(Enum):
 
-    SERVICE = 'SERVICE'
-    STORAGE = 'STORAGE'
+    SERVICE = "SERVICE"
+    STORAGE = "STORAGE"
 
 
 class CredentialType(Enum):
     """The type of credential."""
 
-    BEARER_TOKEN = 'BEARER_TOKEN'
-    USERNAME_PASSWORD = 'USERNAME_PASSWORD'
+    BEARER_TOKEN = "BEARER_TOKEN"
+    USERNAME_PASSWORD = "USERNAME_PASSWORD"
 
 
 @dataclass
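
A short sketch of the as_dict vs as_shallow_dict distinction visible in the CredentialInfo hunk above: as_dict recurses into nested message types, while as_shallow_dict keeps the nested objects as-is. The payload below is illustrative:

    from databricks.sdk.service.catalog import CredentialInfo, DatabricksGcpServiceAccount

    cred = CredentialInfo.from_dict({
        "name": "gcp-cred",
        "databricks_gcp_service_account": {"email": "svc@example.iam.gserviceaccount.com"},
    })
    # Deep serialization converts nested objects back into dictionaries.
    assert cred.as_dict()["databricks_gcp_service_account"] == {"email": "svc@example.iam.gserviceaccount.com"}
    # Shallow serialization keeps the nested dataclass instance untouched.
    assert isinstance(cred.as_shallow_dict()["databricks_gcp_service_account"], DatabricksGcpServiceAccount)
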
@@ -2357,21 +2872,28 @@ class CredentialValidationResult:
     def as_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.result is not None: body['result'] = self.result.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.result is not None:
+            body["result"] = self.result.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.result is not None: body['result'] = self.result
+        if self.message is not None:
+            body["message"] = self.message
+        if self.result is not None:
+            body["result"] = self.result
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialValidationResult:
         """Deserializes the CredentialValidationResult from a dictionary."""
-        return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult))
+        return cls(
+            message=d.get("message", None),
+            result=_enum(d, "result", ValidateCredentialResult),
+        )
 
 
 @dataclass
@@ -2384,47 +2906,49 @@ class CurrentWorkspaceBindings:
     def as_dict(self) -> dict:
         """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.workspaces: body['workspaces'] = [v for v in self.workspaces]
+        if self.workspaces:
+            body["workspaces"] = [v for v in self.workspaces]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CurrentWorkspaceBindings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.workspaces: body['workspaces'] = self.workspaces
+        if self.workspaces:
+            body["workspaces"] = self.workspaces
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings:
         """Deserializes the CurrentWorkspaceBindings from a dictionary."""
-        return cls(workspaces=d.get('workspaces', None))
+        return cls(workspaces=d.get("workspaces", None))
 
 
 class DataSourceFormat(Enum):
     """Data source format"""
 
-    AVRO = 'AVRO'
-    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
-    CSV = 'CSV'
-    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
-    DELTA = 'DELTA'
-    DELTASHARING = 'DELTASHARING'
-    HIVE_CUSTOM = 'HIVE_CUSTOM'
-    HIVE_SERDE = 'HIVE_SERDE'
-    JSON = 'JSON'
-    MYSQL_FORMAT = 'MYSQL_FORMAT'
-    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
-    ORC = 'ORC'
-    PARQUET = 'PARQUET'
-    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
-    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
-    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
-    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
-    SQLDW_FORMAT = 'SQLDW_FORMAT'
-    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
-    TEXT = 'TEXT'
-    UNITY_CATALOG = 'UNITY_CATALOG'
-    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
-    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'
+    AVRO = "AVRO"
+    BIGQUERY_FORMAT = "BIGQUERY_FORMAT"
+    CSV = "CSV"
+    DATABRICKS_FORMAT = "DATABRICKS_FORMAT"
+    DELTA = "DELTA"
+    DELTASHARING = "DELTASHARING"
+    HIVE_CUSTOM = "HIVE_CUSTOM"
+    HIVE_SERDE = "HIVE_SERDE"
+    JSON = "JSON"
+    MYSQL_FORMAT = "MYSQL_FORMAT"
+    NETSUITE_FORMAT = "NETSUITE_FORMAT"
+    ORC = "ORC"
+    PARQUET = "PARQUET"
+    POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT"
+    REDSHIFT_FORMAT = "REDSHIFT_FORMAT"
+    SALESFORCE_FORMAT = "SALESFORCE_FORMAT"
+    SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT"
+    SQLDW_FORMAT = "SQLDW_FORMAT"
+    SQLSERVER_FORMAT = "SQLSERVER_FORMAT"
+    TEXT = "TEXT"
+    UNITY_CATALOG = "UNITY_CATALOG"
+    VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT"
+    WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT"
 
 
 @dataclass
@@ -2445,25 +2969,33 @@ class DatabricksGcpServiceAccount:
     def as_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
-        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
+        if self.private_key_id is not None:
+            body["private_key_id"] = self.private_key_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
-        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
+        if self.private_key_id is not None:
+            body["private_key_id"] = self.private_key_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccount:
         """Deserializes the DatabricksGcpServiceAccount from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None),
-                   email=d.get('email', None),
-                   private_key_id=d.get('private_key_id', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            email=d.get("email", None),
+            private_key_id=d.get("private_key_id", None),
+        )
 
 
 @dataclass
@@ -2496,21 +3028,28 @@ class DatabricksGcpServiceAccountResponse:
     def as_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountResponse:
         """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None), email=d.get('email', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            email=d.get("email", None),
+        )
 
 
 @dataclass
@@ -2573,7 +3112,8 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
 @dataclass
 class DeltaRuntimePropertiesKvPairs:
     """Properties pertaining to the current state of the delta table as given by the commit server.
-    This does not contain **delta.*** (input) properties in __TableInfo.properties__."""
+    This does not contain **delta.*** (input) properties in __TableInfo.properties__.
+    """
 
     delta_runtime_properties: Dict[str, str]
     """A map of key-value properties attached to the securable."""
@@ -2581,19 +3121,21 @@ class DeltaRuntimePropertiesKvPairs:
     def as_dict(self) -> dict:
         """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        if self.delta_runtime_properties:
+            body["delta_runtime_properties"] = self.delta_runtime_properties
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        if self.delta_runtime_properties:
+            body["delta_runtime_properties"] = self.delta_runtime_properties
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaRuntimePropertiesKvPairs:
         """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary."""
-        return cls(delta_runtime_properties=d.get('delta_runtime_properties', None))
+        return cls(delta_runtime_properties=d.get("delta_runtime_properties", None))
 
 
 @dataclass
@@ -2610,22 +3152,28 @@ class Dependency:
     def as_dict(self) -> dict:
         """Serializes the Dependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function: body['function'] = self.function.as_dict()
-        if self.table: body['table'] = self.table.as_dict()
+        if self.function:
+            body["function"] = self.function.as_dict()
+        if self.table:
+            body["table"] = self.table.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Dependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function: body['function'] = self.function
-        if self.table: body['table'] = self.table
+        if self.function:
+            body["function"] = self.function
+        if self.table:
+            body["table"] = self.table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dependency:
         """Deserializes the Dependency from a dictionary."""
-        return cls(function=_from_dict(d, 'function', FunctionDependency),
-                   table=_from_dict(d, 'table', TableDependency))
+        return cls(
+            function=_from_dict(d, "function", FunctionDependency),
+            table=_from_dict(d, "table", TableDependency),
+        )
 
 
 @dataclass
@@ -2638,19 +3186,21 @@ class DependencyList:
     def as_dict(self) -> dict:
         """Serializes the DependencyList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies]
+        if self.dependencies:
+            body["dependencies"] = [v.as_dict() for v in self.dependencies]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DependencyList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dependencies: body['dependencies'] = self.dependencies
+        if self.dependencies:
+            body["dependencies"] = self.dependencies
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DependencyList:
         """Deserializes the DependencyList from a dictionary."""
-        return cls(dependencies=_repeated_dict(d, 'dependencies', Dependency))
+        return cls(dependencies=_repeated_dict(d, "dependencies", Dependency))
 
 
 @dataclass
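
A brief sketch of how the repeated/nested helpers behave for the Dependency types above; the function name is a placeholder:

    from databricks.sdk.service.catalog import Dependency, DependencyList

    payload = {"dependencies": [{"function": {"function_full_name": "main.default.mask_email"}}]}
    deps = DependencyList.from_dict(payload)
    # _repeated_dict turns each element into a Dependency, which in turn
    # resolves its nested "function" dict into a FunctionDependency.
    assert isinstance(deps.dependencies[0], Dependency)
    assert deps.dependencies[0].function.function_full_name == "main.default.mask_email"
    # Serializing back reproduces the original JSON-shaped payload.
    assert deps.as_dict() == payload
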
@@ -2681,20 +3231,20 @@ def as_dict(self) -> dict:
         """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.privilege_assignments:
-            body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePermissionsList:
         """Deserializes the EffectivePermissionsList from a dictionary."""
-        return cls(
-            privilege_assignments=_repeated_dict(d, 'privilege_assignments', EffectivePrivilegeAssignment))
+        return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment))
 
 
 @dataclass
@@ -2713,34 +3263,45 @@ class EffectivePredictiveOptimizationFlag:
     def as_dict(self) -> dict:
         """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
-        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value
-        if self.value is not None: body['value'] = self.value.value
+        if self.inherited_from_name is not None:
+            body["inherited_from_name"] = self.inherited_from_name
+        if self.inherited_from_type is not None:
+            body["inherited_from_type"] = self.inherited_from_type.value
+        if self.value is not None:
+            body["value"] = self.value.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
-        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
-        if self.value is not None: body['value'] = self.value
+        if self.inherited_from_name is not None:
+            body["inherited_from_name"] = self.inherited_from_name
+        if self.inherited_from_type is not None:
+            body["inherited_from_type"] = self.inherited_from_type
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePredictiveOptimizationFlag:
         """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary."""
-        return cls(inherited_from_name=d.get('inherited_from_name', None),
-                   inherited_from_type=_enum(d, 'inherited_from_type',
-                                             EffectivePredictiveOptimizationFlagInheritedFromType),
-                   value=_enum(d, 'value', EnablePredictiveOptimization))
+        return cls(
+            inherited_from_name=d.get("inherited_from_name", None),
+            inherited_from_type=_enum(
+                d,
+                "inherited_from_type",
+                EffectivePredictiveOptimizationFlagInheritedFromType,
+            ),
+            value=_enum(d, "value", EnablePredictiveOptimization),
+        )
 
 
 class EffectivePredictiveOptimizationFlagInheritedFromType(Enum):
     """The type of the object from which the flag was inherited. If there was no inheritance, this
     field is left blank."""
 
-    CATALOG = 'CATALOG'
-    SCHEMA = 'SCHEMA'
+    CATALOG = "CATALOG"
+    SCHEMA = "SCHEMA"
 
 
 @dataclass
@@ -2759,25 +3320,33 @@ class EffectivePrivilege:
     def as_dict(self) -> dict:
         """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
-        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value
-        if self.privilege is not None: body['privilege'] = self.privilege.value
+        if self.inherited_from_name is not None:
+            body["inherited_from_name"] = self.inherited_from_name
+        if self.inherited_from_type is not None:
+            body["inherited_from_type"] = self.inherited_from_type.value
+        if self.privilege is not None:
+            body["privilege"] = self.privilege.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
-        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
-        if self.privilege is not None: body['privilege'] = self.privilege
+        if self.inherited_from_name is not None:
+            body["inherited_from_name"] = self.inherited_from_name
+        if self.inherited_from_type is not None:
+            body["inherited_from_type"] = self.inherited_from_type
+        if self.privilege is not None:
+            body["privilege"] = self.privilege
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilege:
         """Deserializes the EffectivePrivilege from a dictionary."""
-        return cls(inherited_from_name=d.get('inherited_from_name', None),
-                   inherited_from_type=_enum(d, 'inherited_from_type', SecurableType),
-                   privilege=_enum(d, 'privilege', Privilege))
+        return cls(
+            inherited_from_name=d.get("inherited_from_name", None),
+            inherited_from_type=_enum(d, "inherited_from_type", SecurableType),
+            privilege=_enum(d, "privilege", Privilege),
+        )
 
 
 @dataclass
@@ -2791,30 +3360,36 @@ class EffectivePrivilegeAssignment:
     def as_dict(self) -> dict:
         """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges]
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = [v.as_dict() for v in self.privileges]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = self.privileges
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = self.privileges
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilegeAssignment:
         """Deserializes the EffectivePrivilegeAssignment from a dictionary."""
-        return cls(principal=d.get('principal', None),
-                   privileges=_repeated_dict(d, 'privileges', EffectivePrivilege))
+        return cls(
+            principal=d.get("principal", None),
+            privileges=_repeated_dict(d, "privileges", EffectivePrivilege),
+        )
 
 
 class EnablePredictiveOptimization(Enum):
     """Whether predictive optimization should be enabled for this object and objects under it."""
 
-    DISABLE = 'DISABLE'
-    ENABLE = 'ENABLE'
-    INHERIT = 'INHERIT'
+    DISABLE = "DISABLE"
+    ENABLE = "ENABLE"
+    INHERIT = "INHERIT"
 
 
 @dataclass
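
A small sketch of how enum fields are handled by the serializers in the hunks above: as_dict emits the enum's string value, as_shallow_dict keeps the member, and from_dict maps the string back to the enum. The values are illustrative:

    from databricks.sdk.service.catalog import (
        EffectivePredictiveOptimizationFlag,
        EffectivePredictiveOptimizationFlagInheritedFromType,
        EnablePredictiveOptimization,
    )

    flag = EffectivePredictiveOptimizationFlag(
        value=EnablePredictiveOptimization.ENABLE,
        inherited_from_name="main",
        inherited_from_type=EffectivePredictiveOptimizationFlagInheritedFromType.CATALOG,
    )
    assert flag.as_dict()["value"] == "ENABLE"                                     # JSON-ready string
    assert flag.as_shallow_dict()["value"] is EnablePredictiveOptimization.ENABLE  # enum member kept
    restored = EffectivePredictiveOptimizationFlag.from_dict(flag.as_dict())
    assert restored.value is EnablePredictiveOptimization.ENABLE                   # string mapped back to the enum
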
@@ -2846,19 +3421,21 @@ class EncryptionDetails:
     def as_dict(self) -> dict:
         """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict()
+        if self.sse_encryption_details:
+            body["sse_encryption_details"] = self.sse_encryption_details.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details
+        if self.sse_encryption_details:
+            body["sse_encryption_details"] = self.sse_encryption_details
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EncryptionDetails:
         """Deserializes the EncryptionDetails from a dictionary."""
-        return cls(sse_encryption_details=_from_dict(d, 'sse_encryption_details', SseEncryptionDetails))
+        return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails))
 
 
 @dataclass
@@ -2919,67 +3496,103 @@ class ExternalLocationInfo:
     def as_dict(self) -> dict:
         """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo:
         """Deserializes the ExternalLocationInfo from a dictionary."""
-        return cls(access_point=d.get('access_point', None),
-                   browse_only=d.get('browse_only', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   credential_id=d.get('credential_id', None),
-                   credential_name=d.get('credential_name', None),
-                   encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
-                   fallback=d.get('fallback', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   read_only=d.get('read_only', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   url=d.get('url', None))
+        return cls(
+            access_point=d.get("access_point", None),
+            browse_only=d.get("browse_only", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            credential_id=d.get("credential_id", None),
+            credential_name=d.get("credential_name", None),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            fallback=d.get("fallback", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -3000,23 +3613,27 @@ def as_dict(self) -> dict:
         """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FailedStatus into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FailedStatus:
         """Deserializes the FailedStatus from a dictionary."""
-        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None),
-                   timestamp=d.get('timestamp', None))
+        return cls(
+            last_processed_commit_version=d.get("last_processed_commit_version", None),
+            timestamp=d.get("timestamp", None),
+        )
 
 
 @dataclass
@@ -3036,28 +3653,38 @@ class ForeignKeyConstraint:
     def as_dict(self) -> dict:
         """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.child_columns: body['child_columns'] = [v for v in self.child_columns]
-        if self.name is not None: body['name'] = self.name
-        if self.parent_columns: body['parent_columns'] = [v for v in self.parent_columns]
-        if self.parent_table is not None: body['parent_table'] = self.parent_table
+        if self.child_columns:
+            body["child_columns"] = [v for v in self.child_columns]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parent_columns:
+            body["parent_columns"] = [v for v in self.parent_columns]
+        if self.parent_table is not None:
+            body["parent_table"] = self.parent_table
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.child_columns: body['child_columns'] = self.child_columns
-        if self.name is not None: body['name'] = self.name
-        if self.parent_columns: body['parent_columns'] = self.parent_columns
-        if self.parent_table is not None: body['parent_table'] = self.parent_table
+        if self.child_columns:
+            body["child_columns"] = self.child_columns
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parent_columns:
+            body["parent_columns"] = self.parent_columns
+        if self.parent_table is not None:
+            body["parent_table"] = self.parent_table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForeignKeyConstraint:
         """Deserializes the ForeignKeyConstraint from a dictionary."""
-        return cls(child_columns=d.get('child_columns', None),
-                   name=d.get('name', None),
-                   parent_columns=d.get('parent_columns', None),
-                   parent_table=d.get('parent_table', None))
+        return cls(
+            child_columns=d.get("child_columns", None),
+            name=d.get("name", None),
+            parent_columns=d.get("parent_columns", None),
+            parent_table=d.get("parent_table", None),
+        )
 
 
 @dataclass
@@ -3071,19 +3698,21 @@ class FunctionDependency:
     def as_dict(self) -> dict:
         """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
+        if self.function_full_name is not None:
+            body["function_full_name"] = self.function_full_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
+        if self.function_full_name is not None:
+            body["function_full_name"] = self.function_full_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionDependency:
         """Deserializes the FunctionDependency from a dictionary."""
-        return cls(function_full_name=d.get('function_full_name', None))
+        return cls(function_full_name=d.get("function_full_name", None))
 
 
 @dataclass
@@ -3184,112 +3813,174 @@ class FunctionInfo:
     def as_dict(self) -> dict:
         """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_type is not None: body['data_type'] = self.data_type.value
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.function_id is not None: body['function_id'] = self.function_id
-        if self.input_params: body['input_params'] = self.input_params.as_dict()
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params.as_dict()
-        if self.routine_body is not None: body['routine_body'] = self.routine_body.value
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type.value
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_type is not None:
+            body["data_type"] = self.data_type.value
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.function_id is not None:
+            body["function_id"] = self.function_id
+        if self.input_params:
+            body["input_params"] = self.input_params.as_dict()
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style.value
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params.as_dict()
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body.value
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies.as_dict()
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type.value
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access.value
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_type is not None: body['data_type'] = self.data_type
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.function_id is not None: body['function_id'] = self.function_id
-        if self.input_params: body['input_params'] = self.input_params
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params
-        if self.routine_body is not None: body['routine_body'] = self.routine_body
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_type is not None:
+            body["data_type"] = self.data_type
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.function_id is not None:
+            body["function_id"] = self.function_id
+        if self.input_params:
+            body["input_params"] = self.input_params
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionInfo:
         """Deserializes the FunctionInfo from a dictionary."""
-        return cls(browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   data_type=_enum(d, 'data_type', ColumnTypeName),
-                   external_language=d.get('external_language', None),
-                   external_name=d.get('external_name', None),
-                   full_data_type=d.get('full_data_type', None),
-                   full_name=d.get('full_name', None),
-                   function_id=d.get('function_id', None),
-                   input_params=_from_dict(d, 'input_params', FunctionParameterInfos),
-                   is_deterministic=d.get('is_deterministic', None),
-                   is_null_call=d.get('is_null_call', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   parameter_style=_enum(d, 'parameter_style', FunctionInfoParameterStyle),
-                   properties=d.get('properties', None),
-                   return_params=_from_dict(d, 'return_params', FunctionParameterInfos),
-                   routine_body=_enum(d, 'routine_body', FunctionInfoRoutineBody),
-                   routine_definition=d.get('routine_definition', None),
-                   routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList),
-                   schema_name=d.get('schema_name', None),
-                   security_type=_enum(d, 'security_type', FunctionInfoSecurityType),
-                   specific_name=d.get('specific_name', None),
-                   sql_data_access=_enum(d, 'sql_data_access', FunctionInfoSqlDataAccess),
-                   sql_path=d.get('sql_path', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_type=_enum(d, "data_type", ColumnTypeName),
+            external_language=d.get("external_language", None),
+            external_name=d.get("external_name", None),
+            full_data_type=d.get("full_data_type", None),
+            full_name=d.get("full_name", None),
+            function_id=d.get("function_id", None),
+            input_params=_from_dict(d, "input_params", FunctionParameterInfos),
+            is_deterministic=d.get("is_deterministic", None),
+            is_null_call=d.get("is_null_call", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle),
+            properties=d.get("properties", None),
+            return_params=_from_dict(d, "return_params", FunctionParameterInfos),
+            routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody),
+            routine_definition=d.get("routine_definition", None),
+            routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList),
+            schema_name=d.get("schema_name", None),
+            security_type=_enum(d, "security_type", FunctionInfoSecurityType),
+            specific_name=d.get("specific_name", None),
+            sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess),
+            sql_path=d.get("sql_path", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 class FunctionInfoParameterStyle(Enum):
     """Function parameter style. **S** is the value for SQL."""
 
-    S = 'S'
+    S = "S"
 
 
 class FunctionInfoRoutineBody(Enum):
@@ -3298,22 +3989,22 @@ class FunctionInfoRoutineBody(Enum):
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
 
-    EXTERNAL = 'EXTERNAL'
-    SQL = 'SQL'
+    EXTERNAL = "EXTERNAL"
+    SQL = "SQL"
 
 
 class FunctionInfoSecurityType(Enum):
     """The security type of the function."""
 
-    DEFINER = 'DEFINER'
+    DEFINER = "DEFINER"
 
 
 class FunctionInfoSqlDataAccess(Enum):
     """Function SQL data access."""
 
-    CONTAINS_SQL = 'CONTAINS_SQL'
-    NO_SQL = 'NO_SQL'
-    READS_SQL_DATA = 'READS_SQL_DATA'
+    CONTAINS_SQL = "CONTAINS_SQL"
+    NO_SQL = "NO_SQL"
+    READS_SQL_DATA = "READS_SQL_DATA"
 
 
 @dataclass
@@ -3356,52 +4047,78 @@ class FunctionParameterInfo:
     def as_dict(self) -> dict:
         """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
-        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode.value
-        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type.value
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_default is not None:
+            body["parameter_default"] = self.parameter_default
+        if self.parameter_mode is not None:
+            body["parameter_mode"] = self.parameter_mode.value
+        if self.parameter_type is not None:
+            body["parameter_type"] = self.parameter_type.value
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name.value
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
-        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode
-        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_default is not None:
+            body["parameter_default"] = self.parameter_default
+        if self.parameter_mode is not None:
+            body["parameter_mode"] = self.parameter_mode
+        if self.parameter_type is not None:
+            body["parameter_type"] = self.parameter_type
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfo:
         """Deserializes the FunctionParameterInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   parameter_default=d.get('parameter_default', None),
-                   parameter_mode=_enum(d, 'parameter_mode', FunctionParameterMode),
-                   parameter_type=_enum(d, 'parameter_type', FunctionParameterType),
-                   position=d.get('position', None),
-                   type_interval_type=d.get('type_interval_type', None),
-                   type_json=d.get('type_json', None),
-                   type_name=_enum(d, 'type_name', ColumnTypeName),
-                   type_precision=d.get('type_precision', None),
-                   type_scale=d.get('type_scale', None),
-                   type_text=d.get('type_text', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            parameter_default=d.get("parameter_default", None),
+            parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode),
+            parameter_type=_enum(d, "parameter_type", FunctionParameterType),
+            position=d.get("position", None),
+            type_interval_type=d.get("type_interval_type", None),
+            type_json=d.get("type_json", None),
+            type_name=_enum(d, "type_name", ColumnTypeName),
+            type_precision=d.get("type_precision", None),
+            type_scale=d.get("type_scale", None),
+            type_text=d.get("type_text", None),
+        )
 
 
 @dataclass
@@ -3412,32 +4129,34 @@ class FunctionParameterInfos:
     def as_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
-        return cls(parameters=_repeated_dict(d, 'parameters', FunctionParameterInfo))
+        return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo))
 
 
 class FunctionParameterMode(Enum):
     """The mode of the function parameter."""
 
-    IN = 'IN'
+    IN = "IN"
 
 
 class FunctionParameterType(Enum):
     """The type of function parameter."""
 
-    COLUMN = 'COLUMN'
-    PARAM = 'PARAM'
+    COLUMN = "COLUMN"
+    PARAM = "PARAM"
 
 
 @dataclass
@@ -3450,19 +4169,21 @@ class GcpOauthToken:
     def as_dict(self) -> dict:
         """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        if self.oauth_token is not None:
+            body["oauth_token"] = self.oauth_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        if self.oauth_token is not None:
+            body["oauth_token"] = self.oauth_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
         """Deserializes the GcpOauthToken from a dictionary."""
-        return cls(oauth_token=d.get('oauth_token', None))
+        return cls(oauth_token=d.get("oauth_token", None))
 
 
 @dataclass
@@ -3477,19 +4198,21 @@ class GenerateTemporaryServiceCredentialAzureOptions:
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.resources: body['resources'] = [v for v in self.resources]
+        if self.resources:
+            body["resources"] = [v for v in self.resources]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.resources: body['resources'] = self.resources
+        if self.resources:
+            body["resources"] = self.resources
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzureOptions:
         """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary."""
-        return cls(resources=d.get('resources', None))
+        return cls(resources=d.get("resources", None))
 
 
 @dataclass
@@ -3504,19 +4227,21 @@ class GenerateTemporaryServiceCredentialGcpOptions:
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scopes: body['scopes'] = self.scopes
+        if self.scopes:
+            body["scopes"] = self.scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialGcpOptions:
         """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary."""
-        return cls(scopes=d.get('scopes', None))
+        return cls(scopes=d.get("scopes", None))
 
 
 @dataclass
@@ -3533,26 +4258,37 @@ class GenerateTemporaryServiceCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.azure_options: body['azure_options'] = self.azure_options.as_dict()
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict()
+        if self.azure_options:
+            body["azure_options"] = self.azure_options.as_dict()
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.gcp_options:
+            body["gcp_options"] = self.gcp_options.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.azure_options: body['azure_options'] = self.azure_options
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.gcp_options: body['gcp_options'] = self.gcp_options
+        if self.azure_options:
+            body["azure_options"] = self.azure_options
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.gcp_options:
+            body["gcp_options"] = self.gcp_options
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialRequest:
         """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary."""
-        return cls(azure_options=_from_dict(d, 'azure_options',
-                                            GenerateTemporaryServiceCredentialAzureOptions),
-                   credential_name=d.get('credential_name', None),
-                   gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions))
+        return cls(
+            azure_options=_from_dict(
+                d,
+                "azure_options",
+                GenerateTemporaryServiceCredentialAzureOptions,
+            ),
+            credential_name=d.get("credential_name", None),
+            gcp_options=_from_dict(d, "gcp_options", GenerateTemporaryServiceCredentialGcpOptions),
+        )
 
 
 @dataclass
@@ -3568,21 +4304,28 @@ class GenerateTemporaryTableCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.operation is not None: body['operation'] = self.operation.value
-        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.operation is not None:
+            body["operation"] = self.operation.value
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.operation is not None: body['operation'] = self.operation
-        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.operation is not None:
+            body["operation"] = self.operation
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
         """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
-        return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
+        return cls(
+            operation=_enum(d, "operation", TableOperation),
+            table_id=d.get("table_id", None),
+        )
 
 
 @dataclass
@@ -3618,47 +4361,61 @@ class GenerateTemporaryTableCredentialResponse:
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad.as_dict()
         if self.azure_user_delegation_sas:
-            body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
-        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
-        if self.url is not None: body['url'] = self.url
+            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict()
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
+        if self.r2_temp_credentials:
+            body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
-        if self.azure_aad: body['azure_aad'] = self.azure_aad
-        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
-        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials
-        if self.url is not None: body['url'] = self.url
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad
+        if self.azure_user_delegation_sas:
+            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token
+        if self.r2_temp_credentials:
+            body["r2_temp_credentials"] = self.r2_temp_credentials
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
         """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
-        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
-                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
-                   azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
-                                                        AzureUserDelegationSas),
-                   expiration_time=d.get('expiration_time', None),
-                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken),
-                   r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials),
-                   url=d.get('url', None))
+        return cls(
+            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
+            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
+            azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas),
+            expiration_time=d.get("expiration_time", None),
+            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
+            r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials),
+            url=d.get("url", None),
+        )
 
 
 class GetBindingsSecurableType(Enum):
 
-    CATALOG = 'catalog'
-    CREDENTIAL = 'credential'
-    EXTERNAL_LOCATION = 'external_location'
-    STORAGE_CREDENTIAL = 'storage_credential'
+    CATALOG = "catalog"
+    CREDENTIAL = "credential"
+    EXTERNAL_LOCATION = "external_location"
+    STORAGE_CREDENTIAL = "storage_credential"
 
 
 @dataclass
@@ -3724,98 +4481,130 @@ class GetMetastoreSummaryResponse:
     def as_dict(self) -> dict:
         """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.default_data_access_config_id is not None:
-            body['default_data_access_config_id'] = self.default_data_access_config_id
+            body["default_data_access_config_id"] = self.default_data_access_config_id
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope.value
         if self.external_access_enabled is not None:
-            body['external_access_enabled'] = self.external_access_enabled
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+            body["external_access_enabled"] = self.external_access_enabled
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+            body["privilege_model_version"] = self.privilege_model_version
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         if self.storage_root_credential_name is not None:
-            body['storage_root_credential_name'] = self.storage_root_credential_name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["storage_root_credential_name"] = self.storage_root_credential_name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.default_data_access_config_id is not None:
-            body['default_data_access_config_id'] = self.default_data_access_config_id
+            body["default_data_access_config_id"] = self.default_data_access_config_id
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope
         if self.external_access_enabled is not None:
-            body['external_access_enabled'] = self.external_access_enabled
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+            body["external_access_enabled"] = self.external_access_enabled
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+            body["privilege_model_version"] = self.privilege_model_version
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         if self.storage_root_credential_name is not None:
-            body['storage_root_credential_name'] = self.storage_root_credential_name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["storage_root_credential_name"] = self.storage_root_credential_name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse:
         """Deserializes the GetMetastoreSummaryResponse from a dictionary."""
-        return cls(cloud=d.get('cloud', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   default_data_access_config_id=d.get('default_data_access_config_id', None),
-                   delta_sharing_organization_name=d.get('delta_sharing_organization_name', None),
-                   delta_sharing_recipient_token_lifetime_in_seconds=d.get(
-                       'delta_sharing_recipient_token_lifetime_in_seconds', None),
-                   delta_sharing_scope=_enum(d, 'delta_sharing_scope',
-                                             GetMetastoreSummaryResponseDeltaSharingScope),
-                   external_access_enabled=d.get('external_access_enabled', None),
-                   global_metastore_id=d.get('global_metastore_id', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   privilege_model_version=d.get('privilege_model_version', None),
-                   region=d.get('region', None),
-                   storage_root=d.get('storage_root', None),
-                   storage_root_credential_id=d.get('storage_root_credential_id', None),
-                   storage_root_credential_name=d.get('storage_root_credential_name', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            cloud=d.get("cloud", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            default_data_access_config_id=d.get("default_data_access_config_id", None),
+            delta_sharing_organization_name=d.get("delta_sharing_organization_name", None),
+            delta_sharing_recipient_token_lifetime_in_seconds=d.get(
+                "delta_sharing_recipient_token_lifetime_in_seconds", None
+            ),
+            delta_sharing_scope=_enum(
+                d,
+                "delta_sharing_scope",
+                GetMetastoreSummaryResponseDeltaSharingScope,
+            ),
+            external_access_enabled=d.get("external_access_enabled", None),
+            global_metastore_id=d.get("global_metastore_id", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            privilege_model_version=d.get("privilege_model_version", None),
+            region=d.get("region", None),
+            storage_root=d.get("storage_root", None),
+            storage_root_credential_id=d.get("storage_root_credential_id", None),
+            storage_root_credential_name=d.get("storage_root_credential_name", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 class GetMetastoreSummaryResponseDeltaSharingScope(Enum):
     """The scope of Delta Sharing enabled for the metastore."""
 
-    INTERNAL = 'INTERNAL'
-    INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL'
+    INTERNAL = "INTERNAL"
+    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"
 
 
 @dataclass
@@ -3826,25 +4615,27 @@ class GetQuotaResponse:
     def as_dict(self) -> dict:
         """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
+        if self.quota_info:
+            body["quota_info"] = self.quota_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.quota_info: body['quota_info'] = self.quota_info
+        if self.quota_info:
+            body["quota_info"] = self.quota_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
         """Deserializes the GetQuotaResponse from a dictionary."""
-        return cls(quota_info=_from_dict(d, 'quota_info', QuotaInfo))
+        return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo))
 
 
 class IsolationMode(Enum):
 
-    ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
-    ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'
+    ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED"
+    ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN"
 
 
 @dataclass
@@ -3856,19 +4647,21 @@ class ListAccountMetastoreAssignmentsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids]
+        if self.workspace_ids:
+            body["workspace_ids"] = [v for v in self.workspace_ids]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.workspace_ids: body['workspace_ids'] = self.workspace_ids
+        if self.workspace_ids:
+            body["workspace_ids"] = self.workspace_ids
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountMetastoreAssignmentsResponse:
         """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary."""
-        return cls(workspace_ids=d.get('workspace_ids', None))
+        return cls(workspace_ids=d.get("workspace_ids", None))
 
 
 @dataclass
@@ -3880,19 +4673,20 @@ def as_dict(self) -> dict:
         """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.storage_credentials:
-            body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
+            body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        if self.storage_credentials:
+            body["storage_credentials"] = self.storage_credentials
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountStorageCredentialsResponse:
         """Deserializes the ListAccountStorageCredentialsResponse from a dictionary."""
-        return cls(storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo))
+        return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo))
 
 
 @dataclass
@@ -3907,22 +4701,28 @@ class ListCatalogsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalogs: body['catalogs'] = [v.as_dict() for v in self.catalogs]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.catalogs:
+            body["catalogs"] = [v.as_dict() for v in self.catalogs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalogs: body['catalogs'] = self.catalogs
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.catalogs:
+            body["catalogs"] = self.catalogs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCatalogsResponse:
         """Deserializes the ListCatalogsResponse from a dictionary."""
-        return cls(catalogs=_repeated_dict(d, 'catalogs', CatalogInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            catalogs=_repeated_dict(d, "catalogs", CatalogInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -3937,22 +4737,28 @@ class ListConnectionsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connections: body['connections'] = [v.as_dict() for v in self.connections]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.connections:
+            body["connections"] = [v.as_dict() for v in self.connections]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connections: body['connections'] = self.connections
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.connections:
+            body["connections"] = self.connections
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse:
         """Deserializes the ListConnectionsResponse from a dictionary."""
-        return cls(connections=_repeated_dict(d, 'connections', ConnectionInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            connections=_repeated_dict(d, "connections", ConnectionInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -3966,22 +4772,28 @@ class ListCredentialsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.credentials:
+            body["credentials"] = [v.as_dict() for v in self.credentials]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credentials: body['credentials'] = self.credentials
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.credentials:
+            body["credentials"] = self.credentials
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            credentials=_repeated_dict(d, "credentials", CredentialInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -3997,22 +4809,27 @@ def as_dict(self) -> dict:
         """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.external_locations:
-            body['external_locations'] = [v.as_dict() for v in self.external_locations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+            body["external_locations"] = [v.as_dict() for v in self.external_locations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_locations: body['external_locations'] = self.external_locations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.external_locations:
+            body["external_locations"] = self.external_locations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExternalLocationsResponse:
         """Deserializes the ListExternalLocationsResponse from a dictionary."""
-        return cls(external_locations=_repeated_dict(d, 'external_locations', ExternalLocationInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -4027,22 +4844,28 @@ class ListFunctionsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.functions: body['functions'] = [v.as_dict() for v in self.functions]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.functions:
+            body["functions"] = [v.as_dict() for v in self.functions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.functions: body['functions'] = self.functions
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.functions:
+            body["functions"] = self.functions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFunctionsResponse:
         """Deserializes the ListFunctionsResponse from a dictionary."""
-        return cls(functions=_repeated_dict(d, 'functions', FunctionInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            functions=_repeated_dict(d, "functions", FunctionInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -4053,19 +4876,21 @@ class ListMetastoresResponse:
     def as_dict(self) -> dict:
         """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores]
+        if self.metastores:
+            body["metastores"] = [v.as_dict() for v in self.metastores]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastores: body['metastores'] = self.metastores
+        if self.metastores:
+            body["metastores"] = self.metastores
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListMetastoresResponse:
         """Deserializes the ListMetastoresResponse from a dictionary."""
-        return cls(metastores=_repeated_dict(d, 'metastores', MetastoreInfo))
+        return cls(metastores=_repeated_dict(d, "metastores", MetastoreInfo))
 
 
 @dataclass
@@ -4079,22 +4904,28 @@ class ListModelVersionsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = [v.as_dict() for v in self.model_versions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_versions: body['model_versions'] = self.model_versions
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = self.model_versions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse:
         """Deserializes the ListModelVersionsResponse from a dictionary."""
-        return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersionInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -4109,22 +4940,28 @@ class ListQuotasResponse:
     def as_dict(self) -> dict:
         """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.quotas:
+            body["quotas"] = [v.as_dict() for v in self.quotas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.quotas: body['quotas'] = self.quotas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.quotas:
+            body["quotas"] = self.quotas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse:
         """Deserializes the ListQuotasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   quotas=_repeated_dict(d, 'quotas', QuotaInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            quotas=_repeated_dict(d, "quotas", QuotaInfo),
+        )
 
 
 @dataclass
@@ -4138,22 +4975,28 @@ class ListRegisteredModelsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = [v.as_dict() for v in self.registered_models]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = self.registered_models
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = self.registered_models
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegisteredModelsResponse:
         """Deserializes the ListRegisteredModelsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   registered_models=_repeated_dict(d, 'registered_models', RegisteredModelInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo),
+        )
 
 
 @dataclass
@@ -4168,22 +5011,28 @@ class ListSchemasResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = [v.as_dict() for v in self.schemas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = self.schemas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchemasResponse:
         """Deserializes the ListSchemasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   schemas=_repeated_dict(d, 'schemas', SchemaInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            schemas=_repeated_dict(d, "schemas", SchemaInfo),
+        )
 
 
 @dataclass
@@ -4197,23 +5046,28 @@ class ListStorageCredentialsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         if self.storage_credentials:
-            body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
+            body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.storage_credentials:
+            body["storage_credentials"] = self.storage_credentials
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse:
         """Deserializes the ListStorageCredentialsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo),
+        )
 
 
 @dataclass
@@ -4228,22 +5082,28 @@ class ListSystemSchemasResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = [v.as_dict() for v in self.schemas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = self.schemas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse:
         """Deserializes the ListSystemSchemasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            schemas=_repeated_dict(d, "schemas", SystemSchemaInfo),
+        )
 
 
 @dataclass
@@ -4258,22 +5118,28 @@ class ListTableSummariesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = [v.as_dict() for v in self.tables]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = self.tables
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = self.tables
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTableSummariesResponse:
         """Deserializes the ListTableSummariesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   tables=_repeated_dict(d, 'tables', TableSummary))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            tables=_repeated_dict(d, "tables", TableSummary),
+        )
 
 
 @dataclass
@@ -4288,22 +5154,28 @@ class ListTablesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = [v.as_dict() for v in self.tables]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = self.tables
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = self.tables
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTablesResponse:
         """Deserializes the ListTablesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   tables=_repeated_dict(d, 'tables', TableInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            tables=_repeated_dict(d, "tables", TableInfo),
+        )
 
 
 @dataclass
@@ -4318,28 +5190,34 @@ class ListVolumesResponseContent:
     def as_dict(self) -> dict:
         """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.volumes:
+            body["volumes"] = [v.as_dict() for v in self.volumes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.volumes: body['volumes'] = self.volumes
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.volumes:
+            body["volumes"] = self.volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVolumesResponseContent:
         """Deserializes the ListVolumesResponseContent from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   volumes=_repeated_dict(d, 'volumes', VolumeInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            volumes=_repeated_dict(d, "volumes", VolumeInfo),
+        )
 
 
 class MatchType(Enum):
     """The artifact pattern matching type"""
 
-    PREFIX_MATCH = 'PREFIX_MATCH'
+    PREFIX_MATCH = "PREFIX_MATCH"
 
 
 @dataclass
@@ -4356,25 +5234,33 @@ class MetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MetastoreAssignment:
         """Deserializes the MetastoreAssignment from a dictionary."""
-        return cls(default_catalog_name=d.get('default_catalog_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            default_catalog_name=d.get("default_catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -4440,97 +5326,126 @@ class MetastoreInfo:
     def as_dict(self) -> dict:
         """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.default_data_access_config_id is not None:
-            body['default_data_access_config_id'] = self.default_data_access_config_id
+            body["default_data_access_config_id"] = self.default_data_access_config_id
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope.value
         if self.external_access_enabled is not None:
-            body['external_access_enabled'] = self.external_access_enabled
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+            body["external_access_enabled"] = self.external_access_enabled
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+            body["privilege_model_version"] = self.privilege_model_version
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         if self.storage_root_credential_name is not None:
-            body['storage_root_credential_name'] = self.storage_root_credential_name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["storage_root_credential_name"] = self.storage_root_credential_name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.default_data_access_config_id is not None:
-            body['default_data_access_config_id'] = self.default_data_access_config_id
+            body["default_data_access_config_id"] = self.default_data_access_config_id
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope
         if self.external_access_enabled is not None:
-            body['external_access_enabled'] = self.external_access_enabled
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+            body["external_access_enabled"] = self.external_access_enabled
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+            body["privilege_model_version"] = self.privilege_model_version
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         if self.storage_root_credential_name is not None:
-            body['storage_root_credential_name'] = self.storage_root_credential_name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["storage_root_credential_name"] = self.storage_root_credential_name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MetastoreInfo:
         """Deserializes the MetastoreInfo from a dictionary."""
-        return cls(cloud=d.get('cloud', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   default_data_access_config_id=d.get('default_data_access_config_id', None),
-                   delta_sharing_organization_name=d.get('delta_sharing_organization_name', None),
-                   delta_sharing_recipient_token_lifetime_in_seconds=d.get(
-                       'delta_sharing_recipient_token_lifetime_in_seconds', None),
-                   delta_sharing_scope=_enum(d, 'delta_sharing_scope', MetastoreInfoDeltaSharingScope),
-                   external_access_enabled=d.get('external_access_enabled', None),
-                   global_metastore_id=d.get('global_metastore_id', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   privilege_model_version=d.get('privilege_model_version', None),
-                   region=d.get('region', None),
-                   storage_root=d.get('storage_root', None),
-                   storage_root_credential_id=d.get('storage_root_credential_id', None),
-                   storage_root_credential_name=d.get('storage_root_credential_name', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            cloud=d.get("cloud", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            default_data_access_config_id=d.get("default_data_access_config_id", None),
+            delta_sharing_organization_name=d.get("delta_sharing_organization_name", None),
+            delta_sharing_recipient_token_lifetime_in_seconds=d.get(
+                "delta_sharing_recipient_token_lifetime_in_seconds", None
+            ),
+            delta_sharing_scope=_enum(d, "delta_sharing_scope", MetastoreInfoDeltaSharingScope),
+            external_access_enabled=d.get("external_access_enabled", None),
+            global_metastore_id=d.get("global_metastore_id", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            privilege_model_version=d.get("privilege_model_version", None),
+            region=d.get("region", None),
+            storage_root=d.get("storage_root", None),
+            storage_root_credential_id=d.get("storage_root_credential_id", None),
+            storage_root_credential_name=d.get("storage_root_credential_name", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 class MetastoreInfoDeltaSharingScope(Enum):
     """The scope of Delta Sharing enabled for the metastore."""
 
-    INTERNAL = 'INTERNAL'
-    INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL'
+    INTERNAL = "INTERNAL"
+    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"
 
 
 @dataclass
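
The only behavioural difference between the two serializers reformatted above is depth: as_dict
converts nested values (child objects via their own as_dict(), enums via .value), while
as_shallow_dict keeps them as-is. A minimal, hypothetical illustration using the
delta_sharing_scope enum (only the fields needed for the example are set; this assumes the
generated fields all default to None):

    m = MetastoreInfo(
        name="main",
        delta_sharing_scope=MetastoreInfoDeltaSharingScope.INTERNAL,
    )

    deep = m.as_dict()
    shallow = m.as_shallow_dict()

    assert deep["delta_sharing_scope"] == "INTERNAL"  # enum flattened to its string value
    assert shallow["delta_sharing_scope"] is MetastoreInfoDeltaSharingScope.INTERNAL  # enum kept
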
@@ -4598,85 +5513,124 @@ class ModelVersionInfo:
     def as_dict(self) -> dict:
         """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases]
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.id is not None: body['id'] = self.id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.aliases:
+            body["aliases"] = [v.as_dict() for v in self.aliases]
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.id is not None:
+            body["id"] = self.id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
         if self.model_version_dependencies:
-            body['model_version_dependencies'] = self.model_version_dependencies.as_dict()
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status.value
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.version is not None: body['version'] = self.version
+            body["model_version_dependencies"] = self.model_version_dependencies.as_dict()
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_workspace_id is not None:
+            body["run_workspace_id"] = self.run_workspace_id
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aliases: body['aliases'] = self.aliases
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.id is not None: body['id'] = self.id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.aliases:
+            body["aliases"] = self.aliases
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.id is not None:
+            body["id"] = self.id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
         if self.model_version_dependencies:
-            body['model_version_dependencies'] = self.model_version_dependencies
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.version is not None: body['version'] = self.version
+            body["model_version_dependencies"] = self.model_version_dependencies
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_workspace_id is not None:
+            body["run_workspace_id"] = self.run_workspace_id
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo:
         """Deserializes the ModelVersionInfo from a dictionary."""
-        return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias),
-                   browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   id=d.get('id', None),
-                   metastore_id=d.get('metastore_id', None),
-                   model_name=d.get('model_name', None),
-                   model_version_dependencies=_from_dict(d, 'model_version_dependencies', DependencyList),
-                   run_id=d.get('run_id', None),
-                   run_workspace_id=d.get('run_workspace_id', None),
-                   schema_name=d.get('schema_name', None),
-                   source=d.get('source', None),
-                   status=_enum(d, 'status', ModelVersionInfoStatus),
-                   storage_location=d.get('storage_location', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   version=d.get('version', None))
+        return cls(
+            aliases=_repeated_dict(d, "aliases", RegisteredModelAlias),
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            id=d.get("id", None),
+            metastore_id=d.get("metastore_id", None),
+            model_name=d.get("model_name", None),
+            model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList),
+            run_id=d.get("run_id", None),
+            run_workspace_id=d.get("run_workspace_id", None),
+            schema_name=d.get("schema_name", None),
+            source=d.get("source", None),
+            status=_enum(d, "status", ModelVersionInfoStatus),
+            storage_location=d.get("storage_location", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            version=d.get("version", None),
+        )
 
 
 class ModelVersionInfoStatus(Enum):
     """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION
     status, then move to READY status once the model version files are uploaded and the model
-    version is finalized. Only model versions in READY status can be loaded for inference or served."""
+    version is finalized. Only model versions in READY status can be loaded for inference or served.
+    """
 
-    FAILED_REGISTRATION = 'FAILED_REGISTRATION'
-    PENDING_REGISTRATION = 'PENDING_REGISTRATION'
-    READY = 'READY'
+    FAILED_REGISTRATION = "FAILED_REGISTRATION"
+    PENDING_REGISTRATION = "PENDING_REGISTRATION"
+    READY = "READY"
 
 
 @dataclass
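
The ModelVersionInfoStatus docstring above spells out the lifecycle: versions start in
PENDING_REGISTRATION and move to READY once their files are uploaded and finalized, and only
READY versions can be served. A hedged usage sketch with an invented response body:

    # Invented payload for illustration; real bodies come from the model versions API.
    raw = {"model_name": "main.default.churn_model", "version": 3, "status": "READY"}

    mv = ModelVersionInfo.from_dict(raw)
    if mv.status is ModelVersionInfoStatus.READY:
        # Per the docstring, only READY versions can be loaded for inference or served.
        print(f"version {mv.version} is ready to serve")
    else:
        print(f"version {mv.version} is not ready yet: {mv.status}")
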
@@ -4695,34 +5649,40 @@ class MonitorCronSchedule:
     def as_dict(self) -> dict:
         """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorCronSchedule:
         """Deserializes the MonitorCronSchedule from a dictionary."""
-        return cls(pause_status=_enum(d, 'pause_status', MonitorCronSchedulePauseStatus),
-                   quartz_cron_expression=d.get('quartz_cron_expression', None),
-                   timezone_id=d.get('timezone_id', None))
+        return cls(
+            pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus),
+            quartz_cron_expression=d.get("quartz_cron_expression", None),
+            timezone_id=d.get("timezone_id", None),
+        )
 
 
 class MonitorCronSchedulePauseStatus(Enum):
     """Read only field that indicates whether a schedule is paused or not."""
 
-    PAUSED = 'PAUSED'
-    UNPAUSED = 'UNPAUSED'
+    PAUSED = "PAUSED"
+    UNPAUSED = "UNPAUSED"
 
 
 @dataclass
@@ -4733,19 +5693,21 @@ class MonitorDataClassificationConfig:
     def as_dict(self) -> dict:
         """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDataClassificationConfig:
         """Deserializes the MonitorDataClassificationConfig from a dictionary."""
-        return cls(enabled=d.get('enabled', None))
+        return cls(enabled=d.get("enabled", None))
 
 
 @dataclass
@@ -4757,19 +5719,21 @@ class MonitorDestination:
     def as_dict(self) -> dict:
         """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses]
+        if self.email_addresses:
+            body["email_addresses"] = [v for v in self.email_addresses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.email_addresses: body['email_addresses'] = self.email_addresses
+        if self.email_addresses:
+            body["email_addresses"] = self.email_addresses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDestination:
         """Deserializes the MonitorDestination from a dictionary."""
-        return cls(email_addresses=d.get('email_addresses', None))
+        return cls(email_addresses=d.get("email_addresses", None))
 
 
 @dataclass
@@ -4808,45 +5772,61 @@ class MonitorInferenceLog:
     def as_dict(self) -> dict:
         """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.granularities: body['granularities'] = [v for v in self.granularities]
-        if self.label_col is not None: body['label_col'] = self.label_col
-        if self.model_id_col is not None: body['model_id_col'] = self.model_id_col
-        if self.prediction_col is not None: body['prediction_col'] = self.prediction_col
-        if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col
-        if self.problem_type is not None: body['problem_type'] = self.problem_type.value
-        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        if self.granularities:
+            body["granularities"] = [v for v in self.granularities]
+        if self.label_col is not None:
+            body["label_col"] = self.label_col
+        if self.model_id_col is not None:
+            body["model_id_col"] = self.model_id_col
+        if self.prediction_col is not None:
+            body["prediction_col"] = self.prediction_col
+        if self.prediction_proba_col is not None:
+            body["prediction_proba_col"] = self.prediction_proba_col
+        if self.problem_type is not None:
+            body["problem_type"] = self.problem_type.value
+        if self.timestamp_col is not None:
+            body["timestamp_col"] = self.timestamp_col
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.granularities: body['granularities'] = self.granularities
-        if self.label_col is not None: body['label_col'] = self.label_col
-        if self.model_id_col is not None: body['model_id_col'] = self.model_id_col
-        if self.prediction_col is not None: body['prediction_col'] = self.prediction_col
-        if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col
-        if self.problem_type is not None: body['problem_type'] = self.problem_type
-        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        if self.granularities:
+            body["granularities"] = self.granularities
+        if self.label_col is not None:
+            body["label_col"] = self.label_col
+        if self.model_id_col is not None:
+            body["model_id_col"] = self.model_id_col
+        if self.prediction_col is not None:
+            body["prediction_col"] = self.prediction_col
+        if self.prediction_proba_col is not None:
+            body["prediction_proba_col"] = self.prediction_proba_col
+        if self.problem_type is not None:
+            body["problem_type"] = self.problem_type
+        if self.timestamp_col is not None:
+            body["timestamp_col"] = self.timestamp_col
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInferenceLog:
         """Deserializes the MonitorInferenceLog from a dictionary."""
-        return cls(granularities=d.get('granularities', None),
-                   label_col=d.get('label_col', None),
-                   model_id_col=d.get('model_id_col', None),
-                   prediction_col=d.get('prediction_col', None),
-                   prediction_proba_col=d.get('prediction_proba_col', None),
-                   problem_type=_enum(d, 'problem_type', MonitorInferenceLogProblemType),
-                   timestamp_col=d.get('timestamp_col', None))
+        return cls(
+            granularities=d.get("granularities", None),
+            label_col=d.get("label_col", None),
+            model_id_col=d.get("model_id_col", None),
+            prediction_col=d.get("prediction_col", None),
+            prediction_proba_col=d.get("prediction_proba_col", None),
+            problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType),
+            timestamp_col=d.get("timestamp_col", None),
+        )
 
 
 class MonitorInferenceLogProblemType(Enum):
     """Problem type the model aims to solve. Determines the type of model-quality metrics that will be
     computed."""
 
-    PROBLEM_TYPE_CLASSIFICATION = 'PROBLEM_TYPE_CLASSIFICATION'
-    PROBLEM_TYPE_REGRESSION = 'PROBLEM_TYPE_REGRESSION'
+    PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION"
+    PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION"
 
 
 @dataclass
@@ -4916,89 +5896,122 @@ class MonitorInfo:
     def as_dict(self) -> dict:
         """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config.as_dict()
+            body["data_classification_config"] = self.data_classification_config.as_dict()
         if self.drift_metrics_table_name is not None:
-            body['drift_metrics_table_name'] = self.drift_metrics_table_name
-        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
+            body["drift_metrics_table_name"] = self.drift_metrics_table_name
+        if self.inference_log:
+            body["inference_log"] = self.inference_log.as_dict()
         if self.latest_monitor_failure_msg is not None:
-            body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
-        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
-        if self.notifications: body['notifications'] = self.notifications.as_dict()
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+            body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None:
+            body["monitor_version"] = self.monitor_version
+        if self.notifications:
+            body["notifications"] = self.notifications.as_dict()
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
         if self.profile_metrics_table_name is not None:
-            body['profile_metrics_table_name'] = self.profile_metrics_table_name
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series.as_dict()
+            body["profile_metrics_table_name"] = self.profile_metrics_table_name
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.slicing_exprs:
+            body["slicing_exprs"] = [v for v in self.slicing_exprs]
+        if self.snapshot:
+            body["snapshot"] = self.snapshot.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = self.custom_metrics
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config
+            body["data_classification_config"] = self.data_classification_config
         if self.drift_metrics_table_name is not None:
-            body['drift_metrics_table_name'] = self.drift_metrics_table_name
-        if self.inference_log: body['inference_log'] = self.inference_log
+            body["drift_metrics_table_name"] = self.drift_metrics_table_name
+        if self.inference_log:
+            body["inference_log"] = self.inference_log
         if self.latest_monitor_failure_msg is not None:
-            body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
-        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
-        if self.notifications: body['notifications'] = self.notifications
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+            body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None:
+            body["monitor_version"] = self.monitor_version
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
         if self.profile_metrics_table_name is not None:
-            body['profile_metrics_table_name'] = self.profile_metrics_table_name
-        if self.schedule: body['schedule'] = self.schedule
-        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
-        if self.snapshot: body['snapshot'] = self.snapshot
-        if self.status is not None: body['status'] = self.status
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series
+            body["profile_metrics_table_name"] = self.profile_metrics_table_name
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.slicing_exprs:
+            body["slicing_exprs"] = self.slicing_exprs
+        if self.snapshot:
+            body["snapshot"] = self.snapshot
+        if self.status is not None:
+            body["status"] = self.status
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInfo:
         """Deserializes the MonitorInfo from a dictionary."""
-        return cls(assets_dir=d.get('assets_dir', None),
-                   baseline_table_name=d.get('baseline_table_name', None),
-                   custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric),
-                   dashboard_id=d.get('dashboard_id', None),
-                   data_classification_config=_from_dict(d, 'data_classification_config',
-                                                         MonitorDataClassificationConfig),
-                   drift_metrics_table_name=d.get('drift_metrics_table_name', None),
-                   inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog),
-                   latest_monitor_failure_msg=d.get('latest_monitor_failure_msg', None),
-                   monitor_version=d.get('monitor_version', None),
-                   notifications=_from_dict(d, 'notifications', MonitorNotifications),
-                   output_schema_name=d.get('output_schema_name', None),
-                   profile_metrics_table_name=d.get('profile_metrics_table_name', None),
-                   schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
-                   slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshot),
-                   status=_enum(d, 'status', MonitorInfoStatus),
-                   table_name=d.get('table_name', None),
-                   time_series=_from_dict(d, 'time_series', MonitorTimeSeries))
+        return cls(
+            assets_dir=d.get("assets_dir", None),
+            baseline_table_name=d.get("baseline_table_name", None),
+            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
+            dashboard_id=d.get("dashboard_id", None),
+            data_classification_config=_from_dict(
+                d,
+                "data_classification_config",
+                MonitorDataClassificationConfig,
+            ),
+            drift_metrics_table_name=d.get("drift_metrics_table_name", None),
+            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
+            latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None),
+            monitor_version=d.get("monitor_version", None),
+            notifications=_from_dict(d, "notifications", MonitorNotifications),
+            output_schema_name=d.get("output_schema_name", None),
+            profile_metrics_table_name=d.get("profile_metrics_table_name", None),
+            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
+            slicing_exprs=d.get("slicing_exprs", None),
+            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
+            status=_enum(d, "status", MonitorInfoStatus),
+            table_name=d.get("table_name", None),
+            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
+        )
 
 
 class MonitorInfoStatus(Enum):
     """The status of the monitor."""
 
-    MONITOR_STATUS_ACTIVE = 'MONITOR_STATUS_ACTIVE'
-    MONITOR_STATUS_DELETE_PENDING = 'MONITOR_STATUS_DELETE_PENDING'
-    MONITOR_STATUS_ERROR = 'MONITOR_STATUS_ERROR'
-    MONITOR_STATUS_FAILED = 'MONITOR_STATUS_FAILED'
-    MONITOR_STATUS_PENDING = 'MONITOR_STATUS_PENDING'
+    MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE"
+    MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING"
+    MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR"
+    MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED"
+    MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING"
 
 
 @dataclass
@@ -5031,31 +6044,43 @@ class MonitorMetric:
     def as_dict(self) -> dict:
         """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.input_columns: body['input_columns'] = [v for v in self.input_columns]
-        if self.name is not None: body['name'] = self.name
-        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
-        if self.type is not None: body['type'] = self.type.value
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.input_columns:
+            body["input_columns"] = [v for v in self.input_columns]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.output_data_type is not None:
+            body["output_data_type"] = self.output_data_type
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.input_columns: body['input_columns'] = self.input_columns
-        if self.name is not None: body['name'] = self.name
-        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
-        if self.type is not None: body['type'] = self.type
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.input_columns:
+            body["input_columns"] = self.input_columns
+        if self.name is not None:
+            body["name"] = self.name
+        if self.output_data_type is not None:
+            body["output_data_type"] = self.output_data_type
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorMetric:
         """Deserializes the MonitorMetric from a dictionary."""
-        return cls(definition=d.get('definition', None),
-                   input_columns=d.get('input_columns', None),
-                   name=d.get('name', None),
-                   output_data_type=d.get('output_data_type', None),
-                   type=_enum(d, 'type', MonitorMetricType))
+        return cls(
+            definition=d.get("definition", None),
+            input_columns=d.get("input_columns", None),
+            name=d.get("name", None),
+            output_data_type=d.get("output_data_type", None),
+            type=_enum(d, "type", MonitorMetricType),
+        )
 
 
 class MonitorMetricType(Enum):
@@ -5065,11 +6090,12 @@ class MonitorMetricType(Enum):
     ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the
     two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing
     columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate
-    metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics"""
+    metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics
+    """
 
-    CUSTOM_METRIC_TYPE_AGGREGATE = 'CUSTOM_METRIC_TYPE_AGGREGATE'
-    CUSTOM_METRIC_TYPE_DERIVED = 'CUSTOM_METRIC_TYPE_DERIVED'
-    CUSTOM_METRIC_TYPE_DRIFT = 'CUSTOM_METRIC_TYPE_DRIFT'
+    CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE"
+    CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED"
+    CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT"
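
For readers skimming the reformatted output, here is a minimal sketch of the round trip these generated serializers implement for MonitorMetric. The metric values are made up for illustration, and it assumes the databricks-sdk package is importable.

# Illustrative only; field values are hypothetical.
from databricks.sdk.service.catalog import MonitorMetric, MonitorMetricType

metric = MonitorMetric(
    name="avg_price",
    definition="avg(price)",
    input_columns=["price"],
    output_data_type="double",
    type=MonitorMetricType.CUSTOM_METRIC_TYPE_AGGREGATE,
)
payload = metric.as_dict()  # as_dict() flattens the enum to its .value string
assert payload["type"] == "CUSTOM_METRIC_TYPE_AGGREGATE"
assert MonitorMetric.from_dict(payload).as_dict() == payload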
 
 
 @dataclass
@@ -5083,25 +6109,28 @@ class MonitorNotifications:
     def as_dict(self) -> dict:
         """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.on_failure: body['on_failure'] = self.on_failure.as_dict()
+        if self.on_failure:
+            body["on_failure"] = self.on_failure.as_dict()
         if self.on_new_classification_tag_detected:
-            body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict()
+            body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_failure:
+            body["on_failure"] = self.on_failure
         if self.on_new_classification_tag_detected:
-            body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected
+            body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorNotifications:
         """Deserializes the MonitorNotifications from a dictionary."""
-        return cls(on_failure=_from_dict(d, 'on_failure', MonitorDestination),
-                   on_new_classification_tag_detected=_from_dict(d, 'on_new_classification_tag_detected',
-                                                                 MonitorDestination))
+        return cls(
+            on_failure=_from_dict(d, "on_failure", MonitorDestination),
+            on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination),
+        )
 
 
 @dataclass
@@ -5127,51 +6156,65 @@ class MonitorRefreshInfo:
     def as_dict(self) -> dict:
         """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
-        if self.message is not None: body['message'] = self.message
-        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
-        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
-        if self.state is not None: body['state'] = self.state.value
-        if self.trigger is not None: body['trigger'] = self.trigger.value
+        if self.end_time_ms is not None:
+            body["end_time_ms"] = self.end_time_ms
+        if self.message is not None:
+            body["message"] = self.message
+        if self.refresh_id is not None:
+            body["refresh_id"] = self.refresh_id
+        if self.start_time_ms is not None:
+            body["start_time_ms"] = self.start_time_ms
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.trigger is not None:
+            body["trigger"] = self.trigger.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
-        if self.message is not None: body['message'] = self.message
-        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
-        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
-        if self.state is not None: body['state'] = self.state
-        if self.trigger is not None: body['trigger'] = self.trigger
+        if self.end_time_ms is not None:
+            body["end_time_ms"] = self.end_time_ms
+        if self.message is not None:
+            body["message"] = self.message
+        if self.refresh_id is not None:
+            body["refresh_id"] = self.refresh_id
+        if self.start_time_ms is not None:
+            body["start_time_ms"] = self.start_time_ms
+        if self.state is not None:
+            body["state"] = self.state
+        if self.trigger is not None:
+            body["trigger"] = self.trigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshInfo:
         """Deserializes the MonitorRefreshInfo from a dictionary."""
-        return cls(end_time_ms=d.get('end_time_ms', None),
-                   message=d.get('message', None),
-                   refresh_id=d.get('refresh_id', None),
-                   start_time_ms=d.get('start_time_ms', None),
-                   state=_enum(d, 'state', MonitorRefreshInfoState),
-                   trigger=_enum(d, 'trigger', MonitorRefreshInfoTrigger))
+        return cls(
+            end_time_ms=d.get("end_time_ms", None),
+            message=d.get("message", None),
+            refresh_id=d.get("refresh_id", None),
+            start_time_ms=d.get("start_time_ms", None),
+            state=_enum(d, "state", MonitorRefreshInfoState),
+            trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger),
+        )
 
 
 class MonitorRefreshInfoState(Enum):
     """The current state of the refresh."""
 
-    CANCELED = 'CANCELED'
-    FAILED = 'FAILED'
-    PENDING = 'PENDING'
-    RUNNING = 'RUNNING'
-    SUCCESS = 'SUCCESS'
+    CANCELED = "CANCELED"
+    FAILED = "FAILED"
+    PENDING = "PENDING"
+    RUNNING = "RUNNING"
+    SUCCESS = "SUCCESS"
 
 
 class MonitorRefreshInfoTrigger(Enum):
     """The method by which the refresh was triggered."""
 
-    MANUAL = 'MANUAL'
-    SCHEDULE = 'SCHEDULE'
+    MANUAL = "MANUAL"
+    SCHEDULE = "SCHEDULE"
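
A short sketch of how from_dict rehydrates the two enum fields of MonitorRefreshInfo; the raw payload below is invented and only uses keys shown in the class.

# Illustrative only; the raw values are hypothetical.
from databricks.sdk.service.catalog import (
    MonitorRefreshInfo,
    MonitorRefreshInfoState,
    MonitorRefreshInfoTrigger,
)

raw = {"refresh_id": 42, "start_time_ms": 1700000000000, "state": "RUNNING", "trigger": "MANUAL"}
info = MonitorRefreshInfo.from_dict(raw)
assert info.state is MonitorRefreshInfoState.RUNNING    # _enum() maps the string to a member
assert info.trigger is MonitorRefreshInfoTrigger.MANUAL
assert info.as_dict() == raw                            # .value turns the members back into strings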
 
 
 @dataclass
@@ -5182,19 +6225,21 @@ class MonitorRefreshListResponse:
     def as_dict(self) -> dict:
         """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes]
+        if self.refreshes:
+            body["refreshes"] = [v.as_dict() for v in self.refreshes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.refreshes: body['refreshes'] = self.refreshes
+        if self.refreshes:
+            body["refreshes"] = self.refreshes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshListResponse:
         """Deserializes the MonitorRefreshListResponse from a dictionary."""
-        return cls(refreshes=_repeated_dict(d, 'refreshes', MonitorRefreshInfo))
+        return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo))
 
 
 @dataclass
@@ -5233,21 +6278,28 @@ class MonitorTimeSeries:
     def as_dict(self) -> dict:
         """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.granularities: body['granularities'] = [v for v in self.granularities]
-        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        if self.granularities:
+            body["granularities"] = [v for v in self.granularities]
+        if self.timestamp_col is not None:
+            body["timestamp_col"] = self.timestamp_col
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.granularities: body['granularities'] = self.granularities
-        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        if self.granularities:
+            body["granularities"] = self.granularities
+        if self.timestamp_col is not None:
+            body["timestamp_col"] = self.timestamp_col
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorTimeSeries:
         """Deserializes the MonitorTimeSeries from a dictionary."""
-        return cls(granularities=d.get('granularities', None), timestamp_col=d.get('timestamp_col', None))
+        return cls(
+            granularities=d.get("granularities", None),
+            timestamp_col=d.get("timestamp_col", None),
+        )
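
A tiny usage sketch for MonitorTimeSeries; the column name and granularity are placeholders.

# Illustrative only.
from databricks.sdk.service.catalog import MonitorTimeSeries

ts = MonitorTimeSeries(timestamp_col="event_ts", granularities=["1 day"])
assert ts.as_dict() == {"granularities": ["1 day"], "timestamp_col": "event_ts"}
assert MonitorTimeSeries.from_dict(ts.as_dict()) == ts  # dataclass equality survives the round trip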
 
 
 @dataclass
@@ -5258,19 +6310,21 @@ class NamedTableConstraint:
     def as_dict(self) -> dict:
         """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NamedTableConstraint:
         """Deserializes the NamedTableConstraint from a dictionary."""
-        return cls(name=d.get('name', None))
+        return cls(name=d.get("name", None))
 
 
 @dataclass
@@ -5297,34 +6351,43 @@ class OnlineTable:
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.spec: body['spec'] = self.spec.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
-        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
-            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
+            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.spec: body['spec'] = self.spec
-        if self.status: body['status'] = self.status
-        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spec:
+            body["spec"] = self.spec
+        if self.status:
+            body["status"] = self.status
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
-            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state
+            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTable:
         """Deserializes the OnlineTable from a dictionary."""
-        return cls(name=d.get('name', None),
-                   spec=_from_dict(d, 'spec', OnlineTableSpec),
-                   status=_from_dict(d, 'status', OnlineTableStatus),
-                   table_serving_url=d.get('table_serving_url', None),
-                   unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state',
-                                                          ProvisioningInfoState))
+        return cls(
+            name=d.get("name", None),
+            spec=_from_dict(d, "spec", OnlineTableSpec),
+            status=_from_dict(d, "status", OnlineTableStatus),
+            table_serving_url=d.get("table_serving_url", None),
+            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
+        )
 
 
 @dataclass
@@ -5361,40 +6424,57 @@ class OnlineTableSpec:
     def as_dict(self) -> dict:
         """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns]
-        if self.run_continuously: body['run_continuously'] = self.run_continuously.as_dict()
-        if self.run_triggered: body['run_triggered'] = self.run_triggered.as_dict()
+        if self.perform_full_copy is not None:
+            body["perform_full_copy"] = self.perform_full_copy
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.primary_key_columns:
+            body["primary_key_columns"] = [v for v in self.primary_key_columns]
+        if self.run_continuously:
+            body["run_continuously"] = self.run_continuously.as_dict()
+        if self.run_triggered:
+            body["run_triggered"] = self.run_triggered.as_dict()
         if self.source_table_full_name is not None:
-            body['source_table_full_name'] = self.source_table_full_name
-        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
+            body["source_table_full_name"] = self.source_table_full_name
+        if self.timeseries_key is not None:
+            body["timeseries_key"] = self.timeseries_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns
-        if self.run_continuously: body['run_continuously'] = self.run_continuously
-        if self.run_triggered: body['run_triggered'] = self.run_triggered
+        if self.perform_full_copy is not None:
+            body["perform_full_copy"] = self.perform_full_copy
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.primary_key_columns:
+            body["primary_key_columns"] = self.primary_key_columns
+        if self.run_continuously:
+            body["run_continuously"] = self.run_continuously
+        if self.run_triggered:
+            body["run_triggered"] = self.run_triggered
         if self.source_table_full_name is not None:
-            body['source_table_full_name'] = self.source_table_full_name
-        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
+            body["source_table_full_name"] = self.source_table_full_name
+        if self.timeseries_key is not None:
+            body["timeseries_key"] = self.timeseries_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpec:
         """Deserializes the OnlineTableSpec from a dictionary."""
-        return cls(perform_full_copy=d.get('perform_full_copy', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   primary_key_columns=d.get('primary_key_columns', None),
-                   run_continuously=_from_dict(d, 'run_continuously',
-                                               OnlineTableSpecContinuousSchedulingPolicy),
-                   run_triggered=_from_dict(d, 'run_triggered', OnlineTableSpecTriggeredSchedulingPolicy),
-                   source_table_full_name=d.get('source_table_full_name', None),
-                   timeseries_key=d.get('timeseries_key', None))
+        return cls(
+            perform_full_copy=d.get("perform_full_copy", None),
+            pipeline_id=d.get("pipeline_id", None),
+            primary_key_columns=d.get("primary_key_columns", None),
+            run_continuously=_from_dict(
+                d,
+                "run_continuously",
+                OnlineTableSpecContinuousSchedulingPolicy,
+            ),
+            run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy),
+            source_table_full_name=d.get("source_table_full_name", None),
+            timeseries_key=d.get("timeseries_key", None),
+        )
 
 
 @dataclass
@@ -5438,17 +6518,17 @@ def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolic
 class OnlineTableState(Enum):
     """The state of an online table."""
 
-    OFFLINE = 'OFFLINE'
-    OFFLINE_FAILED = 'OFFLINE_FAILED'
-    ONLINE = 'ONLINE'
-    ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE'
-    ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE'
-    ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED'
-    ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE'
-    ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES'
-    PROVISIONING = 'PROVISIONING'
-    PROVISIONING_INITIAL_SNAPSHOT = 'PROVISIONING_INITIAL_SNAPSHOT'
-    PROVISIONING_PIPELINE_RESOURCES = 'PROVISIONING_PIPELINE_RESOURCES'
+    OFFLINE = "OFFLINE"
+    OFFLINE_FAILED = "OFFLINE_FAILED"
+    ONLINE = "ONLINE"
+    ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE"
+    ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE"
+    ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED"
+    ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE"
+    ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES"
+    PROVISIONING = "PROVISIONING"
+    PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT"
+    PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES"
 
 
 @dataclass
@@ -5481,35 +6561,47 @@ def as_dict(self) -> dict:
         """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.continuous_update_status:
-            body['continuous_update_status'] = self.continuous_update_status.as_dict()
-        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value
-        if self.failed_status: body['failed_status'] = self.failed_status.as_dict()
-        if self.message is not None: body['message'] = self.message
-        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict()
+            body["continuous_update_status"] = self.continuous_update_status.as_dict()
+        if self.detailed_state is not None:
+            body["detailed_state"] = self.detailed_state.value
+        if self.failed_status:
+            body["failed_status"] = self.failed_status.as_dict()
+        if self.message is not None:
+            body["message"] = self.message
+        if self.provisioning_status:
+            body["provisioning_status"] = self.provisioning_status.as_dict()
         if self.triggered_update_status:
-            body['triggered_update_status'] = self.triggered_update_status.as_dict()
+            body["triggered_update_status"] = self.triggered_update_status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status
-        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state
-        if self.failed_status: body['failed_status'] = self.failed_status
-        if self.message is not None: body['message'] = self.message
-        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status
-        if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status
+        if self.continuous_update_status:
+            body["continuous_update_status"] = self.continuous_update_status
+        if self.detailed_state is not None:
+            body["detailed_state"] = self.detailed_state
+        if self.failed_status:
+            body["failed_status"] = self.failed_status
+        if self.message is not None:
+            body["message"] = self.message
+        if self.provisioning_status:
+            body["provisioning_status"] = self.provisioning_status
+        if self.triggered_update_status:
+            body["triggered_update_status"] = self.triggered_update_status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableStatus:
         """Deserializes the OnlineTableStatus from a dictionary."""
-        return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', ContinuousUpdateStatus),
-                   detailed_state=_enum(d, 'detailed_state', OnlineTableState),
-                   failed_status=_from_dict(d, 'failed_status', FailedStatus),
-                   message=d.get('message', None),
-                   provisioning_status=_from_dict(d, 'provisioning_status', ProvisioningStatus),
-                   triggered_update_status=_from_dict(d, 'triggered_update_status', TriggeredUpdateStatus))
+        return cls(
+            continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus),
+            detailed_state=_enum(d, "detailed_state", OnlineTableState),
+            failed_status=_from_dict(d, "failed_status", FailedStatus),
+            message=d.get("message", None),
+            provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus),
+            triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus),
+        )
 
 
 @dataclass
@@ -5526,25 +6618,33 @@ class PermissionsChange:
     def as_dict(self) -> dict:
         """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add: body['add'] = [v.value for v in self.add]
-        if self.principal is not None: body['principal'] = self.principal
-        if self.remove: body['remove'] = [v.value for v in self.remove]
+        if self.add:
+            body["add"] = [v.value for v in self.add]
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.remove:
+            body["remove"] = [v.value for v in self.remove]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add: body['add'] = self.add
-        if self.principal is not None: body['principal'] = self.principal
-        if self.remove: body['remove'] = self.remove
+        if self.add:
+            body["add"] = self.add
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.remove:
+            body["remove"] = self.remove
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
-        return cls(add=_repeated_enum(d, 'add', Privilege),
-                   principal=d.get('principal', None),
-                   remove=_repeated_enum(d, 'remove', Privilege))
+        return cls(
+            add=_repeated_enum(d, "add", Privilege),
+            principal=d.get("principal", None),
+            remove=_repeated_enum(d, "remove", Privilege),
+        )
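
To make the as_dict versus as_shallow_dict distinction concrete for PermissionsChange, a hedged sketch with an invented principal and privilege set.

# Illustrative only; principal and privileges are hypothetical.
from databricks.sdk.service.catalog import PermissionsChange, Privilege

change = PermissionsChange(
    principal="data-engineers",
    add=[Privilege.USE_CATALOG, Privilege.SELECT],
    remove=[Privilege.MODIFY],
)
assert change.as_dict() == {
    "add": ["USE_CATALOG", "SELECT"],
    "principal": "data-engineers",
    "remove": ["MODIFY"],
}
# as_shallow_dict() keeps the Privilege members themselves rather than their .value strings
assert change.as_shallow_dict()["add"] == [Privilege.USE_CATALOG, Privilege.SELECT]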
 
 
 @dataclass
@@ -5556,19 +6656,20 @@ def as_dict(self) -> dict:
         """Serializes the PermissionsList into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.privilege_assignments:
-            body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsList:
         """Deserializes the PermissionsList from a dictionary."""
-        return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
+        return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment))
 
 
 @dataclass
@@ -5595,36 +6696,42 @@ def as_dict(self) -> dict:
         """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.estimated_completion_time_seconds is not None:
-            body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
-            body['latest_version_currently_processing'] = self.latest_version_currently_processing
+            body["latest_version_currently_processing"] = self.latest_version_currently_processing
         if self.sync_progress_completion is not None:
-            body['sync_progress_completion'] = self.sync_progress_completion
-        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
-        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+            body["sync_progress_completion"] = self.sync_progress_completion
+        if self.synced_row_count is not None:
+            body["synced_row_count"] = self.synced_row_count
+        if self.total_row_count is not None:
+            body["total_row_count"] = self.total_row_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.estimated_completion_time_seconds is not None:
-            body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
         if self.latest_version_currently_processing is not None:
-            body['latest_version_currently_processing'] = self.latest_version_currently_processing
+            body["latest_version_currently_processing"] = self.latest_version_currently_processing
         if self.sync_progress_completion is not None:
-            body['sync_progress_completion'] = self.sync_progress_completion
-        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
-        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+            body["sync_progress_completion"] = self.sync_progress_completion
+        if self.synced_row_count is not None:
+            body["synced_row_count"] = self.synced_row_count
+        if self.total_row_count is not None:
+            body["total_row_count"] = self.total_row_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineProgress:
         """Deserializes the PipelineProgress from a dictionary."""
-        return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None),
-                   latest_version_currently_processing=d.get('latest_version_currently_processing', None),
-                   sync_progress_completion=d.get('sync_progress_completion', None),
-                   synced_row_count=d.get('synced_row_count', None),
-                   total_row_count=d.get('total_row_count', None))
+        return cls(
+            estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
+            latest_version_currently_processing=d.get("latest_version_currently_processing", None),
+            sync_progress_completion=d.get("sync_progress_completion", None),
+            synced_row_count=d.get("synced_row_count", None),
+            total_row_count=d.get("total_row_count", None),
+        )
 
 
 @dataclass
@@ -5638,70 +6745,77 @@ class PrimaryKeyConstraint:
     def as_dict(self) -> dict:
         """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.child_columns: body['child_columns'] = [v for v in self.child_columns]
-        if self.name is not None: body['name'] = self.name
+        if self.child_columns:
+            body["child_columns"] = [v for v in self.child_columns]
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.child_columns: body['child_columns'] = self.child_columns
-        if self.name is not None: body['name'] = self.name
+        if self.child_columns:
+            body["child_columns"] = self.child_columns
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrimaryKeyConstraint:
         """Deserializes the PrimaryKeyConstraint from a dictionary."""
-        return cls(child_columns=d.get('child_columns', None), name=d.get('name', None))
+        return cls(
+            child_columns=d.get("child_columns", None),
+            name=d.get("name", None),
+        )
 
 
 class Privilege(Enum):
 
-    ACCESS = 'ACCESS'
-    ALL_PRIVILEGES = 'ALL_PRIVILEGES'
-    APPLY_TAG = 'APPLY_TAG'
-    CREATE = 'CREATE'
-    CREATE_CATALOG = 'CREATE_CATALOG'
-    CREATE_CONNECTION = 'CREATE_CONNECTION'
-    CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION'
-    CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
-    CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
-    CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
-    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
-    CREATE_FUNCTION = 'CREATE_FUNCTION'
-    CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
-    CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
-    CREATE_MODEL = 'CREATE_MODEL'
-    CREATE_PROVIDER = 'CREATE_PROVIDER'
-    CREATE_RECIPIENT = 'CREATE_RECIPIENT'
-    CREATE_SCHEMA = 'CREATE_SCHEMA'
-    CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL'
-    CREATE_SHARE = 'CREATE_SHARE'
-    CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL'
-    CREATE_TABLE = 'CREATE_TABLE'
-    CREATE_VIEW = 'CREATE_VIEW'
-    CREATE_VOLUME = 'CREATE_VOLUME'
-    EXECUTE = 'EXECUTE'
-    MANAGE = 'MANAGE'
-    MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
-    MODIFY = 'MODIFY'
-    READ_FILES = 'READ_FILES'
-    READ_PRIVATE_FILES = 'READ_PRIVATE_FILES'
-    READ_VOLUME = 'READ_VOLUME'
-    REFRESH = 'REFRESH'
-    SELECT = 'SELECT'
-    SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
-    USAGE = 'USAGE'
-    USE_CATALOG = 'USE_CATALOG'
-    USE_CONNECTION = 'USE_CONNECTION'
-    USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS'
-    USE_PROVIDER = 'USE_PROVIDER'
-    USE_RECIPIENT = 'USE_RECIPIENT'
-    USE_SCHEMA = 'USE_SCHEMA'
-    USE_SHARE = 'USE_SHARE'
-    WRITE_FILES = 'WRITE_FILES'
-    WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES'
-    WRITE_VOLUME = 'WRITE_VOLUME'
+    ACCESS = "ACCESS"
+    ALL_PRIVILEGES = "ALL_PRIVILEGES"
+    APPLY_TAG = "APPLY_TAG"
+    CREATE = "CREATE"
+    CREATE_CATALOG = "CREATE_CATALOG"
+    CREATE_CONNECTION = "CREATE_CONNECTION"
+    CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
+    CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
+    CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME"
+    CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG"
+    CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE"
+    CREATE_FUNCTION = "CREATE_FUNCTION"
+    CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE"
+    CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW"
+    CREATE_MODEL = "CREATE_MODEL"
+    CREATE_PROVIDER = "CREATE_PROVIDER"
+    CREATE_RECIPIENT = "CREATE_RECIPIENT"
+    CREATE_SCHEMA = "CREATE_SCHEMA"
+    CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL"
+    CREATE_SHARE = "CREATE_SHARE"
+    CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL"
+    CREATE_TABLE = "CREATE_TABLE"
+    CREATE_VIEW = "CREATE_VIEW"
+    CREATE_VOLUME = "CREATE_VOLUME"
+    EXECUTE = "EXECUTE"
+    MANAGE = "MANAGE"
+    MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
+    MODIFY = "MODIFY"
+    READ_FILES = "READ_FILES"
+    READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
+    READ_VOLUME = "READ_VOLUME"
+    REFRESH = "REFRESH"
+    SELECT = "SELECT"
+    SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION"
+    USAGE = "USAGE"
+    USE_CATALOG = "USE_CATALOG"
+    USE_CONNECTION = "USE_CONNECTION"
+    USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS"
+    USE_PROVIDER = "USE_PROVIDER"
+    USE_RECIPIENT = "USE_RECIPIENT"
+    USE_SCHEMA = "USE_SCHEMA"
+    USE_SHARE = "USE_SHARE"
+    WRITE_FILES = "WRITE_FILES"
+    WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES"
+    WRITE_VOLUME = "WRITE_VOLUME"
 
 
 @dataclass
@@ -5715,21 +6829,28 @@ class PrivilegeAssignment:
     def as_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = [v.value for v in self.privileges]
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = [v.value for v in self.privileges]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = self.privileges
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = self.privileges
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
-        return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege))
+        return cls(
+            principal=d.get("principal", None),
+            privileges=_repeated_enum(d, "privileges", Privilege),
+        )
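
And the mirror image on the read path: _repeated_enum turns the privilege strings back into Privilege members. The payload is invented.

# Illustrative only.
from databricks.sdk.service.catalog import Privilege, PrivilegeAssignment

raw = {"principal": "analysts", "privileges": ["USE_SCHEMA", "SELECT"]}
assignment = PrivilegeAssignment.from_dict(raw)
assert assignment.privileges == [Privilege.USE_SCHEMA, Privilege.SELECT]
assert assignment.as_dict() == raw  # serialization flattens the members back to strings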
 
 
 PropertiesKvPairs = Dict[str, str]
@@ -5744,35 +6865,38 @@ class ProvisioningInfo:
     def as_dict(self) -> dict:
         """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.state is not None: body['state'] = self.state.value
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.state is not None: body['state'] = self.state
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo:
         """Deserializes the ProvisioningInfo from a dictionary."""
-        return cls(state=_enum(d, 'state', ProvisioningInfoState))
+        return cls(state=_enum(d, "state", ProvisioningInfoState))
 
 
 class ProvisioningInfoState(Enum):
 
-    ACTIVE = 'ACTIVE'
-    DEGRADED = 'DEGRADED'
-    DELETING = 'DELETING'
-    FAILED = 'FAILED'
-    PROVISIONING = 'PROVISIONING'
-    UPDATING = 'UPDATING'
+    ACTIVE = "ACTIVE"
+    DEGRADED = "DEGRADED"
+    DELETING = "DELETING"
+    FAILED = "FAILED"
+    PROVISIONING = "PROVISIONING"
+    UPDATING = "UPDATING"
 
 
 @dataclass
 class ProvisioningStatus:
     """Detailed status of an online table. Shown if the online table is in the
-    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
+    PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.
+    """
 
     initial_pipeline_sync_progress: Optional[PipelineProgress] = None
     """Details about initial data synchronization. Only populated when in the
@@ -5782,21 +6906,20 @@ def as_dict(self) -> dict:
         """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.initial_pipeline_sync_progress:
-            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.initial_pipeline_sync_progress:
-            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus:
         """Deserializes the ProvisioningStatus from a dictionary."""
-        return cls(
-            initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress))
+        return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress))
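
A brief sketch of the nested deserialization performed by _from_dict for ProvisioningStatus; the progress numbers are made up.

# Illustrative only.
from databricks.sdk.service.catalog import PipelineProgress, ProvisioningStatus

raw = {
    "initial_pipeline_sync_progress": {
        "sync_progress_completion": 0.25,
        "synced_row_count": 250,
        "total_row_count": 1000,
    }
}
status = ProvisioningStatus.from_dict(raw)
assert isinstance(status.initial_pipeline_sync_progress, PipelineProgress)
assert status.initial_pipeline_sync_progress.total_row_count == 1000
assert status.as_dict() == raw  # as_dict() re-serializes the nested PipelineProgress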
 
 
 @dataclass
@@ -5822,35 +6945,48 @@ class QuotaInfo:
     def as_dict(self) -> dict:
         """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
-        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.last_refreshed_at is not None:
+            body["last_refreshed_at"] = self.last_refreshed_at
+        if self.parent_full_name is not None:
+            body["parent_full_name"] = self.parent_full_name
         if self.parent_securable_type is not None:
-            body['parent_securable_type'] = self.parent_securable_type.value
-        if self.quota_count is not None: body['quota_count'] = self.quota_count
-        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
-        if self.quota_name is not None: body['quota_name'] = self.quota_name
+            body["parent_securable_type"] = self.parent_securable_type.value
+        if self.quota_count is not None:
+            body["quota_count"] = self.quota_count
+        if self.quota_limit is not None:
+            body["quota_limit"] = self.quota_limit
+        if self.quota_name is not None:
+            body["quota_name"] = self.quota_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
-        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
-        if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type
-        if self.quota_count is not None: body['quota_count'] = self.quota_count
-        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
-        if self.quota_name is not None: body['quota_name'] = self.quota_name
+        if self.last_refreshed_at is not None:
+            body["last_refreshed_at"] = self.last_refreshed_at
+        if self.parent_full_name is not None:
+            body["parent_full_name"] = self.parent_full_name
+        if self.parent_securable_type is not None:
+            body["parent_securable_type"] = self.parent_securable_type
+        if self.quota_count is not None:
+            body["quota_count"] = self.quota_count
+        if self.quota_limit is not None:
+            body["quota_limit"] = self.quota_limit
+        if self.quota_name is not None:
+            body["quota_name"] = self.quota_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
         """Deserializes the QuotaInfo from a dictionary."""
-        return cls(last_refreshed_at=d.get('last_refreshed_at', None),
-                   parent_full_name=d.get('parent_full_name', None),
-                   parent_securable_type=_enum(d, 'parent_securable_type', SecurableType),
-                   quota_count=d.get('quota_count', None),
-                   quota_limit=d.get('quota_limit', None),
-                   quota_name=d.get('quota_name', None))
+        return cls(
+            last_refreshed_at=d.get("last_refreshed_at", None),
+            parent_full_name=d.get("parent_full_name", None),
+            parent_securable_type=_enum(d, "parent_securable_type", SecurableType),
+            quota_count=d.get("quota_count", None),
+            quota_limit=d.get("quota_limit", None),
+            quota_name=d.get("quota_name", None),
+        )
 
 
 @dataclass
@@ -5870,25 +7006,33 @@ class R2Credentials:
     def as_dict(self) -> dict:
         """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
-        if self.session_token is not None: body['session_token'] = self.session_token
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
+        if self.session_token is not None:
+            body["session_token"] = self.session_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the R2Credentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
-        if self.session_token is not None: body['session_token'] = self.session_token
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
+        if self.session_token is not None:
+            body["session_token"] = self.session_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
         """Deserializes the R2Credentials from a dictionary."""
-        return cls(access_key_id=d.get('access_key_id', None),
-                   secret_access_key=d.get('secret_access_key', None),
-                   session_token=d.get('session_token', None))
+        return cls(
+            access_key_id=d.get("access_key_id", None),
+            secret_access_key=d.get("secret_access_key", None),
+            session_token=d.get("session_token", None),
+        )
 
 
 @dataclass
@@ -5903,21 +7047,28 @@ class RegenerateDashboardRequest:
     def as_dict(self) -> dict:
         """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
         """Deserializes the RegenerateDashboardRequest from a dictionary."""
-        return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            table_name=d.get("table_name", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -5931,21 +7082,28 @@ class RegenerateDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.parent_folder is not None:
+            body["parent_folder"] = self.parent_folder
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.parent_folder is not None:
+            body["parent_folder"] = self.parent_folder
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
         """Deserializes the RegenerateDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            parent_folder=d.get("parent_folder", None),
+        )
 
 
 @dataclass
@@ -5961,21 +7119,28 @@ class RegisteredModelAlias:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alias_name is not None: body['alias_name'] = self.alias_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias_name is not None:
+            body["alias_name"] = self.alias_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alias_name is not None: body['alias_name'] = self.alias_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias_name is not None:
+            body["alias_name"] = self.alias_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAlias:
         """Deserializes the RegisteredModelAlias from a dictionary."""
-        return cls(alias_name=d.get('alias_name', None), version_num=d.get('version_num', None))
+        return cls(
+            alias_name=d.get("alias_name", None),
+            version_num=d.get("version_num", None),
+        )
 
 
 @dataclass
@@ -6026,58 +7191,88 @@ class RegisteredModelInfo:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases]
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.aliases:
+            body["aliases"] = [v.as_dict() for v in self.aliases]
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aliases: body['aliases'] = self.aliases
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.aliases:
+            body["aliases"] = self.aliases
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelInfo:
         """Deserializes the RegisteredModelInfo from a dictionary."""
-        return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias),
-                   browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   full_name=d.get('full_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   schema_name=d.get('schema_name', None),
-                   storage_location=d.get('storage_location', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            aliases=_repeated_dict(d, "aliases", RegisteredModelAlias),
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -6139,78 +7334,116 @@ class SchemaInfo:
     def as_dict(self) -> dict:
         """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.effective_predictive_optimization_flag:
-            body[
-                'effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict(
-                )
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.schema_id is not None: body['schema_id'] = self.schema_id
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.schema_id is not None:
+            body["schema_id"] = self.schema_id
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.effective_predictive_optimization_flag:
-            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.schema_id is not None: body['schema_id'] = self.schema_id
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.schema_id is not None:
+            body["schema_id"] = self.schema_id
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
         """Deserializes the SchemaInfo from a dictionary."""
-        return cls(browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   catalog_type=d.get('catalog_type', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   effective_predictive_optimization_flag=_from_dict(
-                       d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag),
-                   enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
-                                                        EnablePredictiveOptimization),
-                   full_name=d.get('full_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   properties=d.get('properties', None),
-                   schema_id=d.get('schema_id', None),
-                   storage_location=d.get('storage_location', None),
-                   storage_root=d.get('storage_root', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            catalog_type=d.get("catalog_type", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            effective_predictive_optimization_flag=_from_dict(
+                d,
+                "effective_predictive_optimization_flag",
+                EffectivePredictiveOptimizationFlag,
+            ),
+            enable_predictive_optimization=_enum(
+                d,
+                "enable_predictive_optimization",
+                EnablePredictiveOptimization,
+            ),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+            schema_id=d.get("schema_id", None),
+            storage_location=d.get("storage_location", None),
+            storage_root=d.get("storage_root", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 SecurableOptionsMap = Dict[str, str]
@@ -6221,21 +7454,21 @@ def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
 class SecurableType(Enum):
     """The type of Unity Catalog securable"""
 
-    CATALOG = 'CATALOG'
-    CLEAN_ROOM = 'CLEAN_ROOM'
-    CONNECTION = 'CONNECTION'
-    CREDENTIAL = 'CREDENTIAL'
-    EXTERNAL_LOCATION = 'EXTERNAL_LOCATION'
-    FUNCTION = 'FUNCTION'
-    METASTORE = 'METASTORE'
-    PIPELINE = 'PIPELINE'
-    PROVIDER = 'PROVIDER'
-    RECIPIENT = 'RECIPIENT'
-    SCHEMA = 'SCHEMA'
-    SHARE = 'SHARE'
-    STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL'
-    TABLE = 'TABLE'
-    VOLUME = 'VOLUME'
+    CATALOG = "CATALOG"
+    CLEAN_ROOM = "CLEAN_ROOM"
+    CONNECTION = "CONNECTION"
+    CREDENTIAL = "CREDENTIAL"
+    EXTERNAL_LOCATION = "EXTERNAL_LOCATION"
+    FUNCTION = "FUNCTION"
+    METASTORE = "METASTORE"
+    PIPELINE = "PIPELINE"
+    PROVIDER = "PROVIDER"
+    RECIPIENT = "RECIPIENT"
+    SCHEMA = "SCHEMA"
+    SHARE = "SHARE"
+    STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL"
+    TABLE = "TABLE"
+    VOLUME = "VOLUME"
 
 
 @dataclass
@@ -6249,22 +7482,28 @@ class SetArtifactAllowlist:
     def as_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers]
-        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value
+        if self.artifact_matchers:
+            body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
+        if self.artifact_type is not None:
+            body["artifact_type"] = self.artifact_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
-        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type
+        if self.artifact_matchers:
+            body["artifact_matchers"] = self.artifact_matchers
+        if self.artifact_type is not None:
+            body["artifact_type"] = self.artifact_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetArtifactAllowlist:
         """Deserializes the SetArtifactAllowlist from a dictionary."""
-        return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher),
-                   artifact_type=_enum(d, 'artifact_type', ArtifactType))
+        return cls(
+            artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
+            artifact_type=_enum(d, "artifact_type", ArtifactType),
+        )
 
 
 @dataclass
@@ -6281,25 +7520,33 @@ class SetRegisteredModelAliasRequest:
     def as_dict(self) -> dict:
         """Serializes the SetRegisteredModelAliasRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alias is not None: body['alias'] = self.alias
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias is not None:
+            body["alias"] = self.alias
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alias is not None: body['alias'] = self.alias
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias is not None:
+            body["alias"] = self.alias
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetRegisteredModelAliasRequest:
         """Deserializes the SetRegisteredModelAliasRequest from a dictionary."""
-        return cls(alias=d.get('alias', None),
-                   full_name=d.get('full_name', None),
-                   version_num=d.get('version_num', None))
+        return cls(
+            alias=d.get("alias", None),
+            full_name=d.get("full_name", None),
+            version_num=d.get("version_num", None),
+        )
 
 
 @dataclass
@@ -6315,29 +7562,35 @@ class SseEncryptionDetails:
     def as_dict(self) -> dict:
         """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.algorithm is not None: body['algorithm'] = self.algorithm.value
-        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        if self.algorithm is not None:
+            body["algorithm"] = self.algorithm.value
+        if self.aws_kms_key_arn is not None:
+            body["aws_kms_key_arn"] = self.aws_kms_key_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.algorithm is not None: body['algorithm'] = self.algorithm
-        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        if self.algorithm is not None:
+            body["algorithm"] = self.algorithm
+        if self.aws_kms_key_arn is not None:
+            body["aws_kms_key_arn"] = self.aws_kms_key_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SseEncryptionDetails:
         """Deserializes the SseEncryptionDetails from a dictionary."""
-        return cls(algorithm=_enum(d, 'algorithm', SseEncryptionDetailsAlgorithm),
-                   aws_kms_key_arn=d.get('aws_kms_key_arn', None))
+        return cls(
+            algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm),
+            aws_kms_key_arn=d.get("aws_kms_key_arn", None),
+        )
 
 
 class SseEncryptionDetailsAlgorithm(Enum):
     """The type of key encryption to use (affects headers from s3 client)."""
 
-    AWS_SSE_KMS = 'AWS_SSE_KMS'
-    AWS_SSE_S3 = 'AWS_SSE_S3'
+    AWS_SSE_KMS = "AWS_SSE_KMS"
+    AWS_SSE_S3 = "AWS_SSE_S3"
 
 
 @dataclass
@@ -6398,77 +7651,112 @@ class StorageCredentialInfo:
     def as_dict(self) -> dict:
         """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         if self.used_for_managed_storage is not None:
-            body['used_for_managed_storage'] = self.used_for_managed_storage
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         if self.used_for_managed_storage is not None:
-            body['used_for_managed_storage'] = self.used_for_managed_storage
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo:
         """Deserializes the StorageCredentialInfo from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleResponse),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity',
-                                                     AzureManagedIdentityResponse),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccountResponse),
-                   full_name=d.get('full_name', None),
-                   id=d.get('id', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   read_only=d.get('read_only', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   used_for_managed_storage=d.get('used_for_managed_storage', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccountResponse,
+            ),
+            full_name=d.get("full_name", None),
+            id=d.get("id", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            used_for_managed_storage=d.get("used_for_managed_storage", None),
+        )
 
 
 @dataclass
@@ -6483,38 +7771,46 @@ class SystemSchemaInfo:
     def as_dict(self) -> dict:
         """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.schema is not None: body['schema'] = self.schema
-        if self.state is not None: body['state'] = self.state.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.schema is not None: body['schema'] = self.schema
-        if self.state is not None: body['state'] = self.state
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SystemSchemaInfo:
         """Deserializes the SystemSchemaInfo from a dictionary."""
-        return cls(schema=d.get('schema', None), state=_enum(d, 'state', SystemSchemaInfoState))
+        return cls(
+            schema=d.get("schema", None),
+            state=_enum(d, "state", SystemSchemaInfoState),
+        )
 
 
 class SystemSchemaInfoState(Enum):
     """The current state of enablement for the system schema. An empty string means the system schema
     is available and ready for opt-in."""
 
-    AVAILABLE = 'AVAILABLE'
-    DISABLE_INITIALIZED = 'DISABLE_INITIALIZED'
-    ENABLE_COMPLETED = 'ENABLE_COMPLETED'
-    ENABLE_INITIALIZED = 'ENABLE_INITIALIZED'
-    UNAVAILABLE = 'UNAVAILABLE'
+    AVAILABLE = "AVAILABLE"
+    DISABLE_INITIALIZED = "DISABLE_INITIALIZED"
+    ENABLE_COMPLETED = "ENABLE_COMPLETED"
+    ENABLE_INITIALIZED = "ENABLE_INITIALIZED"
+    UNAVAILABLE = "UNAVAILABLE"
 
 
 @dataclass
 class TableConstraint:
     """A table constraint, as defined by *one* of the following fields being set:
-    __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__."""
+    __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
+    """
 
     foreign_key_constraint: Optional[ForeignKeyConstraint] = None
 
@@ -6525,25 +7821,33 @@ class TableConstraint:
     def as_dict(self) -> dict:
         """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint.as_dict()
-        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint.as_dict()
-        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint.as_dict()
+        if self.foreign_key_constraint:
+            body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict()
+        if self.named_table_constraint:
+            body["named_table_constraint"] = self.named_table_constraint.as_dict()
+        if self.primary_key_constraint:
+            body["primary_key_constraint"] = self.primary_key_constraint.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint
-        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint
-        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint
+        if self.foreign_key_constraint:
+            body["foreign_key_constraint"] = self.foreign_key_constraint
+        if self.named_table_constraint:
+            body["named_table_constraint"] = self.named_table_constraint
+        if self.primary_key_constraint:
+            body["primary_key_constraint"] = self.primary_key_constraint
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableConstraint:
         """Deserializes the TableConstraint from a dictionary."""
-        return cls(foreign_key_constraint=_from_dict(d, 'foreign_key_constraint', ForeignKeyConstraint),
-                   named_table_constraint=_from_dict(d, 'named_table_constraint', NamedTableConstraint),
-                   primary_key_constraint=_from_dict(d, 'primary_key_constraint', PrimaryKeyConstraint))
+        return cls(
+            foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint),
+            named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint),
+            primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint),
+        )
 
 
 @dataclass
@@ -6557,19 +7861,21 @@ class TableDependency:
     def as_dict(self) -> dict:
         """Serializes the TableDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        if self.table_full_name is not None:
+            body["table_full_name"] = self.table_full_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        if self.table_full_name is not None:
+            body["table_full_name"] = self.table_full_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableDependency:
         """Deserializes the TableDependency from a dictionary."""
-        return cls(table_full_name=d.get('table_full_name', None))
+        return cls(table_full_name=d.get("table_full_name", None))
 
 
 @dataclass
@@ -6580,19 +7886,21 @@ class TableExistsResponse:
     def as_dict(self) -> dict:
         """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        if self.table_exists is not None:
+            body["table_exists"] = self.table_exists
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        if self.table_exists is not None:
+            body["table_exists"] = self.table_exists
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableExistsResponse:
         """Deserializes the TableExistsResponse from a dictionary."""
-        return cls(table_exists=d.get('table_exists', None))
+        return cls(table_exists=d.get("table_exists", None))
 
 
 @dataclass
@@ -6700,133 +8008,196 @@ class TableInfo:
     def as_dict(self) -> dict:
         """Serializes the TableInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_access_configuration_id is not None:
-            body['data_access_configuration_id'] = self.data_access_configuration_id
-        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format.value
-        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+            body["data_access_configuration_id"] = self.data_access_configuration_id
+        if self.data_source_format is not None:
+            body["data_source_format"] = self.data_source_format.value
+        if self.deleted_at is not None:
+            body["deleted_at"] = self.deleted_at
         if self.delta_runtime_properties_kvpairs:
-            body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs.as_dict()
+            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs.as_dict()
         if self.effective_predictive_optimization_flag:
-            body[
-                'effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict(
-                )
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.properties: body['properties'] = self.properties
-        if self.row_filter: body['row_filter'] = self.row_filter.as_dict()
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.properties:
+            body["properties"] = self.properties
+        if self.row_filter:
+            body["row_filter"] = self.row_filter.as_dict()
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.table_constraints: body['table_constraints'] = [v.as_dict() for v in self.table_constraints]
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.table_type is not None: body['table_type'] = self.table_type.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.view_definition is not None: body['view_definition'] = self.view_definition
-        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.table_constraints:
+            body["table_constraints"] = [v.as_dict() for v in self.table_constraints]
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.table_type is not None:
+            body["table_type"] = self.table_type.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.view_definition is not None:
+            body["view_definition"] = self.view_definition
+        if self.view_dependencies:
+            body["view_dependencies"] = self.view_dependencies.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = self.columns
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.columns:
+            body["columns"] = self.columns
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_access_configuration_id is not None:
-            body['data_access_configuration_id'] = self.data_access_configuration_id
-        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format
-        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+            body["data_access_configuration_id"] = self.data_access_configuration_id
+        if self.data_source_format is not None:
+            body["data_source_format"] = self.data_source_format
+        if self.deleted_at is not None:
+            body["deleted_at"] = self.deleted_at
         if self.delta_runtime_properties_kvpairs:
-            body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs
+            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs
         if self.effective_predictive_optimization_flag:
-            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.properties: body['properties'] = self.properties
-        if self.row_filter: body['row_filter'] = self.row_filter
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.properties:
+            body["properties"] = self.properties
+        if self.row_filter:
+            body["row_filter"] = self.row_filter
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.table_constraints: body['table_constraints'] = self.table_constraints
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.table_type is not None: body['table_type'] = self.table_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.view_definition is not None: body['view_definition'] = self.view_definition
-        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.table_constraints:
+            body["table_constraints"] = self.table_constraints
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.table_type is not None:
+            body["table_type"] = self.table_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.view_definition is not None:
+            body["view_definition"] = self.view_definition
+        if self.view_dependencies:
+            body["view_dependencies"] = self.view_dependencies
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableInfo:
         """Deserializes the TableInfo from a dictionary."""
-        return cls(access_point=d.get('access_point', None),
-                   browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   columns=_repeated_dict(d, 'columns', ColumnInfo),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   data_access_configuration_id=d.get('data_access_configuration_id', None),
-                   data_source_format=_enum(d, 'data_source_format', DataSourceFormat),
-                   deleted_at=d.get('deleted_at', None),
-                   delta_runtime_properties_kvpairs=_from_dict(d, 'delta_runtime_properties_kvpairs',
-                                                               DeltaRuntimePropertiesKvPairs),
-                   effective_predictive_optimization_flag=_from_dict(
-                       d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag),
-                   enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
-                                                        EnablePredictiveOptimization),
-                   encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
-                   full_name=d.get('full_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   properties=d.get('properties', None),
-                   row_filter=_from_dict(d, 'row_filter', TableRowFilter),
-                   schema_name=d.get('schema_name', None),
-                   sql_path=d.get('sql_path', None),
-                   storage_credential_name=d.get('storage_credential_name', None),
-                   storage_location=d.get('storage_location', None),
-                   table_constraints=_repeated_dict(d, 'table_constraints', TableConstraint),
-                   table_id=d.get('table_id', None),
-                   table_type=_enum(d, 'table_type', TableType),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   view_definition=d.get('view_definition', None),
-                   view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
+        return cls(
+            access_point=d.get("access_point", None),
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            columns=_repeated_dict(d, "columns", ColumnInfo),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_access_configuration_id=d.get("data_access_configuration_id", None),
+            data_source_format=_enum(d, "data_source_format", DataSourceFormat),
+            deleted_at=d.get("deleted_at", None),
+            delta_runtime_properties_kvpairs=_from_dict(
+                d,
+                "delta_runtime_properties_kvpairs",
+                DeltaRuntimePropertiesKvPairs,
+            ),
+            effective_predictive_optimization_flag=_from_dict(
+                d,
+                "effective_predictive_optimization_flag",
+                EffectivePredictiveOptimizationFlag,
+            ),
+            enable_predictive_optimization=_enum(
+                d,
+                "enable_predictive_optimization",
+                EnablePredictiveOptimization,
+            ),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            pipeline_id=d.get("pipeline_id", None),
+            properties=d.get("properties", None),
+            row_filter=_from_dict(d, "row_filter", TableRowFilter),
+            schema_name=d.get("schema_name", None),
+            sql_path=d.get("sql_path", None),
+            storage_credential_name=d.get("storage_credential_name", None),
+            storage_location=d.get("storage_location", None),
+            table_constraints=_repeated_dict(d, "table_constraints", TableConstraint),
+            table_id=d.get("table_id", None),
+            table_type=_enum(d, "table_type", TableType),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            view_definition=d.get("view_definition", None),
+            view_dependencies=_from_dict(d, "view_dependencies", DependencyList),
+        )
 
 
 class TableOperation(Enum):
 
-    READ = 'READ'
-    READ_WRITE = 'READ_WRITE'
+    READ = "READ"
+    READ_WRITE = "READ_WRITE"
 
 
 @dataclass
@@ -6841,22 +8212,28 @@ class TableRowFilter:
     def as_dict(self) -> dict:
         """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names]
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.input_column_names:
+            body["input_column_names"] = [v for v in self.input_column_names]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.input_column_names: body['input_column_names'] = self.input_column_names
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.input_column_names:
+            body["input_column_names"] = self.input_column_names
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableRowFilter:
         """Deserializes the TableRowFilter from a dictionary."""
-        return cls(function_name=d.get('function_name', None),
-                   input_column_names=d.get('input_column_names', None))
+        return cls(
+            function_name=d.get("function_name", None),
+            input_column_names=d.get("input_column_names", None),
+        )
 
 
 @dataclass
@@ -6869,33 +8246,40 @@ class TableSummary:
     def as_dict(self) -> dict:
         """Serializes the TableSummary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.table_type is not None: body['table_type'] = self.table_type.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.table_type is not None:
+            body["table_type"] = self.table_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.table_type is not None: body['table_type'] = self.table_type
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.table_type is not None:
+            body["table_type"] = self.table_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSummary:
         """Deserializes the TableSummary from a dictionary."""
-        return cls(full_name=d.get('full_name', None), table_type=_enum(d, 'table_type', TableType))
+        return cls(
+            full_name=d.get("full_name", None),
+            table_type=_enum(d, "table_type", TableType),
+        )
 
 
 class TableType(Enum):
 
-    EXTERNAL = 'EXTERNAL'
-    EXTERNAL_SHALLOW_CLONE = 'EXTERNAL_SHALLOW_CLONE'
-    FOREIGN = 'FOREIGN'
-    MANAGED = 'MANAGED'
-    MANAGED_SHALLOW_CLONE = 'MANAGED_SHALLOW_CLONE'
-    MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
-    STREAMING_TABLE = 'STREAMING_TABLE'
-    VIEW = 'VIEW'
+    EXTERNAL = "EXTERNAL"
+    EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE"
+    FOREIGN = "FOREIGN"
+    MANAGED = "MANAGED"
+    MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE"
+    MATERIALIZED_VIEW = "MATERIALIZED_VIEW"
+    STREAMING_TABLE = "STREAMING_TABLE"
+    VIEW = "VIEW"
 
 
 @dataclass
@@ -6920,28 +8304,38 @@ class TemporaryCredentials:
     def as_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad.as_dict()
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
-        if self.azure_aad: body['azure_aad'] = self.azure_aad
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials:
         """Deserializes the TemporaryCredentials from a dictionary."""
-        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
-                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
-                   expiration_time=d.get('expiration_time', None),
-                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken))
+        return cls(
+            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
+            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
+            expiration_time=d.get("expiration_time", None),
+            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
+        )
 
 
 @dataclass
@@ -6964,27 +8358,32 @@ def as_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         if self.triggered_update_progress:
-            body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
+            body["triggered_update_progress"] = self.triggered_update_progress.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.last_processed_commit_version is not None:
-            body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.triggered_update_progress:
+            body["triggered_update_progress"] = self.triggered_update_progress
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggeredUpdateStatus:
         """Deserializes the TriggeredUpdateStatus from a dictionary."""
-        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None),
-                   timestamp=d.get('timestamp', None),
-                   triggered_update_progress=_from_dict(d, 'triggered_update_progress', PipelineProgress))
+        return cls(
+            last_processed_commit_version=d.get("last_processed_commit_version", None),
+            timestamp=d.get("timestamp", None),
+            triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress),
+        )
 
 
 @dataclass
@@ -7027,10 +8426,10 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
 
 class UpdateBindingsSecurableType(Enum):
 
-    CATALOG = 'catalog'
-    CREDENTIAL = 'credential'
-    EXTERNAL_LOCATION = 'external_location'
-    STORAGE_CREDENTIAL = 'storage_credential'
+    CATALOG = "catalog"
+    CREDENTIAL = "credential"
+    EXTERNAL_LOCATION = "external_location"
+    STORAGE_CREDENTIAL = "storage_credential"
 
 
 @dataclass
@@ -7062,43 +8461,62 @@ class UpdateCatalog:
     def as_dict(self) -> dict:
         """Serializes the UpdateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog:
         """Deserializes the UpdateCatalog from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
-                                                        EnablePredictiveOptimization),
-                   isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   options=d.get('options', None),
-                   owner=d.get('owner', None),
-                   properties=d.get('properties', None))
+        return cls(
+            comment=d.get("comment", None),
+            enable_predictive_optimization=_enum(
+                d,
+                "enable_predictive_optimization",
+                EnablePredictiveOptimization,
+            ),
+            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+        )
 
 
 @dataclass
@@ -7118,28 +8536,38 @@ class UpdateConnection:
     def as_dict(self) -> dict:
         """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
         """Deserializes the UpdateConnection from a dictionary."""
-        return cls(name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   options=d.get('options', None),
-                   owner=d.get('owner', None))
+        return cls(
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+        )
 
 
 @dataclass
@@ -7185,56 +8613,82 @@ class UpdateCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.name_arg is not None: body['name_arg'] = self.name_arg
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.name_arg is not None:
+            body["name_arg"] = self.name_arg
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.comment is not None: body['comment'] = self.comment
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.name_arg is not None: body['name_arg'] = self.name_arg
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.name_arg is not None:
+            body["name_arg"] = self.name_arg
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
         """Deserializes the UpdateCredentialRequest from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   comment=d.get('comment', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccount),
-                   force=d.get('force', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   name_arg=d.get('name_arg', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccount,
+            ),
+            force=d.get("force", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            name_arg=d.get("name_arg", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
 
 
 @dataclass
@@ -7282,55 +8736,83 @@ class UpdateExternalLocation:
     def as_dict(self) -> dict:
         """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation:
         """Deserializes the UpdateExternalLocation from a dictionary."""
-        return cls(access_point=d.get('access_point', None),
-                   comment=d.get('comment', None),
-                   credential_name=d.get('credential_name', None),
-                   encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
-                   fallback=d.get('fallback', None),
-                   force=d.get('force', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None),
-                   url=d.get('url', None))
+        return cls(
+            access_point=d.get("access_point", None),
+            comment=d.get("comment", None),
+            credential_name=d.get("credential_name", None),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            fallback=d.get("fallback", None),
+            force=d.get("force", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -7345,21 +8827,25 @@ class UpdateFunction:
     def as_dict(self) -> dict:
         """Serializes the UpdateFunction into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateFunction:
         """Deserializes the UpdateFunction from a dictionary."""
-        return cls(name=d.get('name', None), owner=d.get('owner', None))
+        return cls(name=d.get("name", None), owner=d.get("owner", None))
 
 
 @dataclass
@@ -7393,50 +8879,63 @@ def as_dict(self) -> dict:
         """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
-        if self.id is not None: body['id'] = self.id
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope.value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
+            body["privilege_model_version"] = self.privilege_model_version
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = self.delta_sharing_organization_name
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
-        if self.id is not None: body['id'] = self.id
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                self.delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if self.delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = self.delta_sharing_scope
+        if self.id is not None:
+            body["id"] = self.id
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         if self.privilege_model_version is not None:
-            body['privilege_model_version'] = self.privilege_model_version
+            body["privilege_model_version"] = self.privilege_model_version
         if self.storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = self.storage_root_credential_id
+            body["storage_root_credential_id"] = self.storage_root_credential_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore:
         """Deserializes the UpdateMetastore from a dictionary."""
-        return cls(delta_sharing_organization_name=d.get('delta_sharing_organization_name', None),
-                   delta_sharing_recipient_token_lifetime_in_seconds=d.get(
-                       'delta_sharing_recipient_token_lifetime_in_seconds', None),
-                   delta_sharing_scope=_enum(d, 'delta_sharing_scope', UpdateMetastoreDeltaSharingScope),
-                   id=d.get('id', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   privilege_model_version=d.get('privilege_model_version', None),
-                   storage_root_credential_id=d.get('storage_root_credential_id', None))
+        return cls(
+            delta_sharing_organization_name=d.get("delta_sharing_organization_name", None),
+            delta_sharing_recipient_token_lifetime_in_seconds=d.get(
+                "delta_sharing_recipient_token_lifetime_in_seconds", None
+            ),
+            delta_sharing_scope=_enum(d, "delta_sharing_scope", UpdateMetastoreDeltaSharingScope),
+            id=d.get("id", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            privilege_model_version=d.get("privilege_model_version", None),
+            storage_root_credential_id=d.get("storage_root_credential_id", None),
+        )
 
 
 @dataclass
@@ -7454,32 +8953,40 @@ class UpdateMetastoreAssignment:
     def as_dict(self) -> dict:
         """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastoreAssignment:
         """Deserializes the UpdateMetastoreAssignment from a dictionary."""
-        return cls(default_catalog_name=d.get('default_catalog_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            default_catalog_name=d.get("default_catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 class UpdateMetastoreDeltaSharingScope(Enum):
     """The scope of Delta Sharing enabled for the metastore."""
 
-    INTERNAL = 'INTERNAL'
-    INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL'
+    INTERNAL = "INTERNAL"
+    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"
 
 
 @dataclass
@@ -7496,25 +9003,33 @@ class UpdateModelVersionRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   full_name=d.get('full_name', None),
-                   version=d.get('version', None))
+        return cls(
+            comment=d.get("comment", None),
+            full_name=d.get("full_name", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -7564,55 +9079,82 @@ class UpdateMonitor:
     def as_dict(self) -> dict:
         """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config.as_dict()
-        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
-        if self.notifications: body['notifications'] = self.notifications.as_dict()
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series.as_dict()
+            body["data_classification_config"] = self.data_classification_config.as_dict()
+        if self.inference_log:
+            body["inference_log"] = self.inference_log.as_dict()
+        if self.notifications:
+            body["notifications"] = self.notifications.as_dict()
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.slicing_exprs:
+            body["slicing_exprs"] = [v for v in self.slicing_exprs]
+        if self.snapshot:
+            body["snapshot"] = self.snapshot.as_dict()
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = self.custom_metrics
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
         if self.data_classification_config:
-            body['data_classification_config'] = self.data_classification_config
-        if self.inference_log: body['inference_log'] = self.inference_log
-        if self.notifications: body['notifications'] = self.notifications
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule
-        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
-        if self.snapshot: body['snapshot'] = self.snapshot
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series
+            body["data_classification_config"] = self.data_classification_config
+        if self.inference_log:
+            body["inference_log"] = self.inference_log
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.slicing_exprs:
+            body["slicing_exprs"] = self.slicing_exprs
+        if self.snapshot:
+            body["snapshot"] = self.snapshot
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMonitor:
         """Deserializes the UpdateMonitor from a dictionary."""
-        return cls(baseline_table_name=d.get('baseline_table_name', None),
-                   custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric),
-                   dashboard_id=d.get('dashboard_id', None),
-                   data_classification_config=_from_dict(d, 'data_classification_config',
-                                                         MonitorDataClassificationConfig),
-                   inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog),
-                   notifications=_from_dict(d, 'notifications', MonitorNotifications),
-                   output_schema_name=d.get('output_schema_name', None),
-                   schedule=_from_dict(d, 'schedule', MonitorCronSchedule),
-                   slicing_exprs=d.get('slicing_exprs', None),
-                   snapshot=_from_dict(d, 'snapshot', MonitorSnapshot),
-                   table_name=d.get('table_name', None),
-                   time_series=_from_dict(d, 'time_series', MonitorTimeSeries))
+        return cls(
+            baseline_table_name=d.get("baseline_table_name", None),
+            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
+            dashboard_id=d.get("dashboard_id", None),
+            data_classification_config=_from_dict(
+                d,
+                "data_classification_config",
+                MonitorDataClassificationConfig,
+            ),
+            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
+            notifications=_from_dict(d, "notifications", MonitorNotifications),
+            output_schema_name=d.get("output_schema_name", None),
+            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
+            slicing_exprs=d.get("slicing_exprs", None),
+            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
+            table_name=d.get("table_name", None),
+            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
+        )
 
 
 @dataclass
@@ -7629,25 +9171,33 @@ class UpdatePermissions:
     def as_dict(self) -> dict:
         """Serializes the UpdatePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.changes: body['changes'] = [v.as_dict() for v in self.changes]
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        if self.changes:
+            body["changes"] = [v.as_dict() for v in self.changes]
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.changes: body['changes'] = self.changes
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.changes:
+            body["changes"] = self.changes
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePermissions:
         """Deserializes the UpdatePermissions from a dictionary."""
-        return cls(changes=_repeated_dict(d, 'changes', PermissionsChange),
-                   full_name=d.get('full_name', None),
-                   securable_type=_enum(d, 'securable_type', SecurableType))
+        return cls(
+            changes=_repeated_dict(d, "changes", PermissionsChange),
+            full_name=d.get("full_name", None),
+            securable_type=_enum(d, "securable_type", SecurableType),
+        )
 
 
 @dataclass
@@ -7667,28 +9217,38 @@ class UpdateRegisteredModelRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateRegisteredModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegisteredModelRequest:
         """Deserializes the UpdateRegisteredModelRequest from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   full_name=d.get('full_name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None))
+        return cls(
+            comment=d.get("comment", None),
+            full_name=d.get("full_name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+        )
 
 
 @dataclass
@@ -7733,37 +9293,52 @@ class UpdateSchema:
     def as_dict(self) -> dict:
         """Serializes the UpdateSchema into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSchema:
         """Deserializes the UpdateSchema from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   enable_predictive_optimization=_enum(d, 'enable_predictive_optimization',
-                                                        EnablePredictiveOptimization),
-                   full_name=d.get('full_name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   properties=d.get('properties', None))
+        return cls(
+            comment=d.get("comment", None),
+            enable_predictive_optimization=_enum(
+                d,
+                "enable_predictive_optimization",
+                EnablePredictiveOptimization,
+            ),
+            full_name=d.get("full_name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+        )
 
 
 @dataclass
@@ -7809,60 +9384,87 @@ class UpdateStorageCredential:
     def as_dict(self) -> dict:
         """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
-        if self.comment is not None: body['comment'] = self.comment
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.force is not None: body['force'] = self.force
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.force is not None:
+            body["force"] = self.force
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStorageCredential:
         """Deserializes the UpdateStorageCredential from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity',
-                                                     AzureManagedIdentityResponse),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   comment=d.get('comment', None),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccountRequest),
-                   force=d.get('force', None),
-                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   read_only=d.get('read_only', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccountRequest,
+            ),
+            force=d.get("force", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
 
 
 @dataclass
@@ -7882,28 +9484,38 @@ class UpdateVolumeRequestContent:
     def as_dict(self) -> dict:
         """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent:
         """Deserializes the UpdateVolumeRequestContent from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+        )
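
The generated request/response dataclasses in this file are plain containers: `as_dict` produces the JSON request body (omitting unset fields) and `from_dict` is its inverse. A minimal round-trip sketch, assuming the fields shown above default to None as they do for the other request types in this module:

    from databricks.sdk.service.catalog import UpdateVolumeRequestContent

    # Only explicitly set fields appear in the serialized body.
    req = UpdateVolumeRequestContent(name="raw_files", comment="bronze landing zone")
    body = req.as_dict()  # -> {"comment": "bronze landing zone", "name": "raw_files"}

    # from_dict tolerates missing keys and reconstructs an equal dataclass.
    assert UpdateVolumeRequestContent.from_dict(body) == req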
 
 
 @dataclass
@@ -7920,25 +9532,33 @@ class UpdateWorkspaceBindings:
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assign_workspaces: body['assign_workspaces'] = [v for v in self.assign_workspaces]
-        if self.name is not None: body['name'] = self.name
-        if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces]
+        if self.assign_workspaces:
+            body["assign_workspaces"] = [v for v in self.assign_workspaces]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.unassign_workspaces:
+            body["unassign_workspaces"] = [v for v in self.unassign_workspaces]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces
-        if self.name is not None: body['name'] = self.name
-        if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces
+        if self.assign_workspaces:
+            body["assign_workspaces"] = self.assign_workspaces
+        if self.name is not None:
+            body["name"] = self.name
+        if self.unassign_workspaces:
+            body["unassign_workspaces"] = self.unassign_workspaces
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings:
         """Deserializes the UpdateWorkspaceBindings from a dictionary."""
-        return cls(assign_workspaces=d.get('assign_workspaces', None),
-                   name=d.get('name', None),
-                   unassign_workspaces=d.get('unassign_workspaces', None))
+        return cls(
+            assign_workspaces=d.get("assign_workspaces", None),
+            name=d.get("name", None),
+            unassign_workspaces=d.get("unassign_workspaces", None),
+        )
 
 
 @dataclass
@@ -7958,28 +9578,38 @@ class UpdateWorkspaceBindingsParameters:
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add: body['add'] = [v.as_dict() for v in self.add]
-        if self.remove: body['remove'] = [v.as_dict() for v in self.remove]
-        if self.securable_name is not None: body['securable_name'] = self.securable_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        if self.add:
+            body["add"] = [v.as_dict() for v in self.add]
+        if self.remove:
+            body["remove"] = [v.as_dict() for v in self.remove]
+        if self.securable_name is not None:
+            body["securable_name"] = self.securable_name
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add: body['add'] = self.add
-        if self.remove: body['remove'] = self.remove
-        if self.securable_name is not None: body['securable_name'] = self.securable_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.add:
+            body["add"] = self.add
+        if self.remove:
+            body["remove"] = self.remove
+        if self.securable_name is not None:
+            body["securable_name"] = self.securable_name
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
         """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary."""
-        return cls(add=_repeated_dict(d, 'add', WorkspaceBinding),
-                   remove=_repeated_dict(d, 'remove', WorkspaceBinding),
-                   securable_name=d.get('securable_name', None),
-                   securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType))
+        return cls(
+            add=_repeated_dict(d, "add", WorkspaceBinding),
+            remove=_repeated_dict(d, "remove", WorkspaceBinding),
+            securable_name=d.get("securable_name", None),
+            securable_type=_enum(d, "securable_type", UpdateBindingsSecurableType),
+        )
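
Nested fields follow the same pattern: `as_dict` serializes children through their own `as_dict`, while `from_dict` rebuilds them with the `_repeated_dict`/`_from_dict`/`_enum` helpers. A sketch using the binding types defined further down in this file (same None-default assumption):

    from databricks.sdk.service.catalog import (
        UpdateWorkspaceBindingsParameters,
        WorkspaceBinding,
        WorkspaceBindingBindingType,
    )

    params = UpdateWorkspaceBindingsParameters(
        securable_name="main",
        add=[
            WorkspaceBinding(
                workspace_id=1234567890,
                binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,
            )
        ],
    )

    body = params.as_dict()  # each entry of body["add"] is a dict; the enum is stored as its string value
    roundtrip = UpdateWorkspaceBindingsParameters.from_dict(body)
    assert roundtrip.add[0].binding_type is WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY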
 
 
 @dataclass
@@ -8010,39 +9640,53 @@ class ValidateCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
         if self.external_location_name is not None:
-            body['external_location_name'] = self.external_location_name
-        if self.purpose is not None: body['purpose'] = self.purpose.value
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.url is not None: body['url'] = self.url
+            body["external_location_name"] = self.external_location_name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose.value
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
         if self.external_location_name is not None:
-            body['external_location_name'] = self.external_location_name
-        if self.purpose is not None: body['purpose'] = self.purpose
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.url is not None: body['url'] = self.url
+            body["external_location_name"] = self.external_location_name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest:
         """Deserializes the ValidateCredentialRequest from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
-                   credential_name=d.get('credential_name', None),
-                   external_location_name=d.get('external_location_name', None),
-                   purpose=_enum(d, 'purpose', CredentialPurpose),
-                   read_only=d.get('read_only', None),
-                   url=d.get('url', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            credential_name=d.get("credential_name", None),
+            external_location_name=d.get("external_location_name", None),
+            purpose=_enum(d, "purpose", CredentialPurpose),
+            read_only=d.get("read_only", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -8057,30 +9701,36 @@ class ValidateCredentialResponse:
     def as_dict(self) -> dict:
         """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_dir is not None: body['isDir'] = self.is_dir
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.is_dir is not None:
+            body["isDir"] = self.is_dir
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ValidateCredentialResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_dir is not None: body['isDir'] = self.is_dir
-        if self.results: body['results'] = self.results
+        if self.is_dir is not None:
+            body["isDir"] = self.is_dir
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse:
         """Deserializes the ValidateCredentialResponse from a dictionary."""
-        return cls(is_dir=d.get('isDir', None),
-                   results=_repeated_dict(d, 'results', CredentialValidationResult))
+        return cls(
+            is_dir=d.get("isDir", None),
+            results=_repeated_dict(d, "results", CredentialValidationResult),
+        )
 
 
 class ValidateCredentialResult(Enum):
     """A enum represents the result of the file operation"""
 
-    FAIL = 'FAIL'
-    PASS = 'PASS'
-    SKIP = 'SKIP'
+    FAIL = "FAIL"
+    PASS = "PASS"
+    SKIP = "SKIP"
 
 
 @dataclass
@@ -8115,52 +9765,67 @@ class ValidateStorageCredential:
     def as_dict(self) -> dict:
         """Serializes the ValidateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
         if self.azure_service_principal:
-            body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
         if self.external_location_name is not None:
-            body['external_location_name'] = self.external_location_name
-        if self.read_only is not None: body['read_only'] = self.read_only
+            body["external_location_name"] = self.external_location_name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
-        if self.url is not None: body['url'] = self.url
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
         if self.databricks_gcp_service_account:
-            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
         if self.external_location_name is not None:
-            body['external_location_name'] = self.external_location_name
-        if self.read_only is not None: body['read_only'] = self.read_only
+            body["external_location_name"] = self.external_location_name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         if self.storage_credential_name is not None:
-            body['storage_credential_name'] = self.storage_credential_name
-        if self.url is not None: body['url'] = self.url
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredential:
         """Deserializes the ValidateStorageCredential from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest),
-                   azure_managed_identity=_from_dict(d, 'azure_managed_identity',
-                                                     AzureManagedIdentityRequest),
-                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
-                   cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken),
-                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
-                                                             DatabricksGcpServiceAccountRequest),
-                   external_location_name=d.get('external_location_name', None),
-                   read_only=d.get('read_only', None),
-                   storage_credential_name=d.get('storage_credential_name', None),
-                   url=d.get('url', None))
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            databricks_gcp_service_account=_from_dict(
+                d,
+                "databricks_gcp_service_account",
+                DatabricksGcpServiceAccountRequest,
+            ),
+            external_location_name=d.get("external_location_name", None),
+            read_only=d.get("read_only", None),
+            storage_credential_name=d.get("storage_credential_name", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -8174,21 +9839,28 @@ class ValidateStorageCredentialResponse:
     def as_dict(self) -> dict:
         """Serializes the ValidateStorageCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_dir is not None: body['isDir'] = self.is_dir
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.is_dir is not None:
+            body["isDir"] = self.is_dir
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_dir is not None: body['isDir'] = self.is_dir
-        if self.results: body['results'] = self.results
+        if self.is_dir is not None:
+            body["isDir"] = self.is_dir
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredentialResponse:
         """Deserializes the ValidateStorageCredentialResponse from a dictionary."""
-        return cls(is_dir=d.get('isDir', None), results=_repeated_dict(d, 'results', ValidationResult))
+        return cls(
+            is_dir=d.get("isDir", None),
+            results=_repeated_dict(d, "results", ValidationResult),
+        )
 
 
 @dataclass
@@ -8205,43 +9877,51 @@ class ValidationResult:
     def as_dict(self) -> dict:
         """Serializes the ValidationResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.operation is not None: body['operation'] = self.operation.value
-        if self.result is not None: body['result'] = self.result.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.operation is not None:
+            body["operation"] = self.operation.value
+        if self.result is not None:
+            body["result"] = self.result.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ValidationResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.operation is not None: body['operation'] = self.operation
-        if self.result is not None: body['result'] = self.result
+        if self.message is not None:
+            body["message"] = self.message
+        if self.operation is not None:
+            body["operation"] = self.operation
+        if self.result is not None:
+            body["result"] = self.result
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidationResult:
         """Deserializes the ValidationResult from a dictionary."""
-        return cls(message=d.get('message', None),
-                   operation=_enum(d, 'operation', ValidationResultOperation),
-                   result=_enum(d, 'result', ValidationResultResult))
+        return cls(
+            message=d.get("message", None),
+            operation=_enum(d, "operation", ValidationResultOperation),
+            result=_enum(d, "result", ValidationResultResult),
+        )
 
 
 class ValidationResultOperation(Enum):
     """The operation tested."""
 
-    DELETE = 'DELETE'
-    LIST = 'LIST'
-    PATH_EXISTS = 'PATH_EXISTS'
-    READ = 'READ'
-    WRITE = 'WRITE'
+    DELETE = "DELETE"
+    LIST = "LIST"
+    PATH_EXISTS = "PATH_EXISTS"
+    READ = "READ"
+    WRITE = "WRITE"
 
 
 class ValidationResultResult(Enum):
     """The results of the tested operation."""
 
-    FAIL = 'FAIL'
-    PASS = 'PASS'
-    SKIP = 'SKIP'
+    FAIL = "FAIL"
+    PASS = "PASS"
+    SKIP = "SKIP"
 
 
 @dataclass
@@ -8298,73 +9978,109 @@ class VolumeInfo:
     def as_dict(self) -> dict:
         """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.volume_id is not None: body['volume_id'] = self.volume_id
-        if self.volume_type is not None: body['volume_type'] = self.volume_type.value
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.volume_id is not None:
+            body["volume_id"] = self.volume_id
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.volume_id is not None: body['volume_id'] = self.volume_id
-        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.volume_id is not None:
+            body["volume_id"] = self.volume_id
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumeInfo:
         """Deserializes the VolumeInfo from a dictionary."""
-        return cls(access_point=d.get('access_point', None),
-                   browse_only=d.get('browse_only', None),
-                   catalog_name=d.get('catalog_name', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails),
-                   full_name=d.get('full_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   schema_name=d.get('schema_name', None),
-                   storage_location=d.get('storage_location', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   volume_id=d.get('volume_id', None),
-                   volume_type=_enum(d, 'volume_type', VolumeType))
+        return cls(
+            access_point=d.get("access_point", None),
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            volume_id=d.get("volume_id", None),
+            volume_type=_enum(d, "volume_type", VolumeType),
+        )
 
 
 class VolumeType(Enum):
 
-    EXTERNAL = 'EXTERNAL'
-    MANAGED = 'MANAGED'
+    EXTERNAL = "EXTERNAL"
+    MANAGED = "MANAGED"
 
 
 @dataclass
@@ -8376,28 +10092,34 @@ class WorkspaceBinding:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceBinding into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.binding_type is not None: body['binding_type'] = self.binding_type.value
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.binding_type is not None:
+            body["binding_type"] = self.binding_type.value
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.binding_type is not None: body['binding_type'] = self.binding_type
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.binding_type is not None:
+            body["binding_type"] = self.binding_type
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBinding:
         """Deserializes the WorkspaceBinding from a dictionary."""
-        return cls(binding_type=_enum(d, 'binding_type', WorkspaceBindingBindingType),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            binding_type=_enum(d, "binding_type", WorkspaceBindingBindingType),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 class WorkspaceBindingBindingType(Enum):
 
-    BINDING_TYPE_READ_ONLY = 'BINDING_TYPE_READ_ONLY'
-    BINDING_TYPE_READ_WRITE = 'BINDING_TYPE_READ_WRITE'
+    BINDING_TYPE_READ_ONLY = "BINDING_TYPE_READ_ONLY"
+    BINDING_TYPE_READ_WRITE = "BINDING_TYPE_READ_WRITE"
 
 
 @dataclass
@@ -8414,22 +10136,28 @@ class WorkspaceBindingsResponse:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.bindings:
+            body["bindings"] = [v.as_dict() for v in self.bindings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bindings: body['bindings'] = self.bindings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.bindings:
+            body["bindings"] = self.bindings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse:
         """Deserializes the WorkspaceBindingsResponse from a dictionary."""
-        return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            bindings=_repeated_dict(d, "bindings", WorkspaceBinding),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 class AccountMetastoreAssignmentsAPI:
@@ -8438,119 +10166,144 @@ class AccountMetastoreAssignmentsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               workspace_id: int,
-               metastore_id: str,
-               *,
-               metastore_assignment: Optional[CreateMetastoreAssignment] = None):
+    def create(
+        self,
+        workspace_id: int,
+        metastore_id: str,
+        *,
+        metastore_assignment: Optional[CreateMetastoreAssignment] = None,
+    ):
         """Assigns a workspace to a metastore.
-        
+
         Creates an assignment to a metastore for a workspace
-        
+
         :param workspace_id: int
           Workspace ID.
         :param metastore_id: str
           Unity Catalog metastore ID
         :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional)
-        
-        
+
+
         """
         body = {}
-        if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if metastore_assignment is not None:
+            body["metastore_assignment"] = metastore_assignment.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         self._api.do(
-            'POST',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
 
     def delete(self, workspace_id: int, metastore_id: str):
         """Delete a metastore assignment.
-        
+
         Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
-        
+
         :param workspace_id: int
           Workspace ID.
         :param metastore_id: str
           Unity Catalog metastore ID
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}",
+            headers=headers,
+        )
 
     def get(self, workspace_id: int) -> AccountsMetastoreAssignment:
         """Gets the metastore assignment for a workspace.
-        
+
         Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned
         a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment
         will not be found and a 404 is returned.
-        
+
         :param workspace_id: int
           Workspace ID.
-        
+
         :returns: :class:`AccountsMetastoreAssignment`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore",
+            headers=headers,
+        )
         return AccountsMetastoreAssignment.from_dict(res)
 
     def list(self, metastore_id: str) -> Iterator[int]:
         """Get all workspaces assigned to a metastore.
-        
+
         Gets a list of all Databricks workspace IDs that have been assigned to the given metastore.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
-        
+
         :returns: Iterator over int
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET',
-                            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces',
-                            headers=headers)
+        json = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces",
+            headers=headers,
+        )
         parsed = ListAccountMetastoreAssignmentsResponse.from_dict(json).workspace_ids
         return parsed if parsed is not None else []
 
-    def update(self,
-               workspace_id: int,
-               metastore_id: str,
-               *,
-               metastore_assignment: Optional[UpdateMetastoreAssignment] = None):
+    def update(
+        self,
+        workspace_id: int,
+        metastore_id: str,
+        *,
+        metastore_assignment: Optional[UpdateMetastoreAssignment] = None,
+    ):
         """Updates a metastore assignment to a workspaces.
-        
+
         Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be
         updated.
-        
+
         :param workspace_id: int
           Workspace ID.
         :param metastore_id: str
           Unity Catalog metastore ID
         :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional)
-        
-        
+
+
         """
         body = {}
-        if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if metastore_assignment is not None:
+            body["metastore_assignment"] = metastore_assignment.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         self._api.do(
-            'PUT',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}',
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
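
A short usage sketch for the assignment API above. It assumes account-level credentials are configured in the environment and that `AccountClient` exposes this service as `metastore_assignments`; IDs are placeholders:

    from databricks.sdk import AccountClient

    a = AccountClient()  # reads the account ID and credentials from the environment

    metastore_id = "<metastore-id>"  # placeholder

    # Bind a workspace to the metastore, then list every workspace bound to it.
    a.metastore_assignments.create(workspace_id=1234567890, metastore_id=metastore_id)
    for workspace_id in a.metastore_assignments.list(metastore_id=metastore_id):
        print(workspace_id)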
 
 
 class AccountMetastoresAPI:
@@ -8562,99 +10315,128 @@ def __init__(self, api_client):
 
     def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo:
         """Create metastore.
-        
+
         Creates a Unity Catalog metastore.
-        
+
         :param metastore_info: :class:`CreateMetastore` (optional)
-        
+
         :returns: :class:`AccountsMetastoreInfo`
         """
         body = {}
-        if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if metastore_info is not None:
+            body["metastore_info"] = metastore_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/metastores',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores",
+            body=body,
+            headers=headers,
+        )
         return AccountsMetastoreInfo.from_dict(res)
 
     def delete(self, metastore_id: str, *, force: Optional[bool] = None):
         """Delete a metastore.
-        
+
         Deletes a Unity Catalog metastore for an account, both specified by ID.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param force: bool (optional)
           Force deletion even if the metastore is not empty. Default is false.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
-                     query=query,
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, metastore_id: str) -> AccountsMetastoreInfo:
         """Get a metastore.
-        
+
         Gets a Unity Catalog metastore from an account, both specified by ID.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
-        
+
         :returns: :class:`AccountsMetastoreInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}",
+            headers=headers,
+        )
         return AccountsMetastoreInfo.from_dict(res)
 
     def list(self) -> Iterator[MetastoreInfo]:
         """Get all metastores associated with an account.
-        
+
         Gets all Unity Catalog metastores associated with an account specified by ID.
-        
+
         :returns: Iterator over :class:`MetastoreInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/metastores', headers=headers)
+        json = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores",
+            headers=headers,
+        )
         parsed = ListMetastoresResponse.from_dict(json).metastores
         return parsed if parsed is not None else []
 
-    def update(self,
-               metastore_id: str,
-               *,
-               metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo:
+    def update(
+        self,
+        metastore_id: str,
+        *,
+        metastore_info: Optional[UpdateMetastore] = None,
+    ) -> AccountsMetastoreInfo:
         """Update a metastore.
-        
+
         Updates an existing Unity Catalog metastore.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param metastore_info: :class:`UpdateMetastore` (optional)
-        
+
         :returns: :class:`AccountsMetastoreInfo`
         """
         body = {}
-        if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if metastore_info is not None:
+            body["metastore_info"] = metastore_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}",
+            body=body,
+            headers=headers,
+        )
         return AccountsMetastoreInfo.from_dict(res)
 
 
@@ -8664,191 +10446,235 @@ class AccountStorageCredentialsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               metastore_id: str,
-               *,
-               credential_info: Optional[CreateStorageCredential] = None) -> AccountsStorageCredentialInfo:
+    def create(
+        self,
+        metastore_id: str,
+        *,
+        credential_info: Optional[CreateStorageCredential] = None,
+    ) -> AccountsStorageCredentialInfo:
         """Create a storage credential.
-        
+
         Creates a new storage credential. The request object is specific to the cloud:
-        
+
         * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
         **GcpServiceAcountKey** for GCP credentials.
-        
+
         The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
         metastore.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param credential_info: :class:`CreateStorageCredential` (optional)
-        
+
         :returns: :class:`AccountsStorageCredentialInfo`
         """
         body = {}
-        if credential_info is not None: body['credential_info'] = credential_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if credential_info is not None:
+            body["credential_info"] = credential_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials',
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return AccountsStorageCredentialInfo.from_dict(res)
 
-    def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None):
+    def delete(
+        self,
+        metastore_id: str,
+        storage_credential_name: str,
+        *,
+        force: Optional[bool] = None,
+    ):
         """Delete a storage credential.
-        
+
         Deletes a storage credential from the metastore. The caller must be an owner of the storage
         credential.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param storage_credential_name: str
           Name of the storage credential.
         :param force: bool (optional)
           Force deletion even if the Storage Credential is not empty. Default is false.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
 
     def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo:
         """Gets the named storage credential.
-        
+
         Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
         storage credential, or have a level of privilege on the storage credential.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param storage_credential_name: str
           Name of the storage credential.
-        
+
         :returns: :class:`AccountsStorageCredentialInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}",
+            headers=headers,
+        )
         return AccountsStorageCredentialInfo.from_dict(res)
 
     def list(self, metastore_id: str) -> Iterator[StorageCredentialInfo]:
         """Get all storage credentials assigned to a metastore.
-        
+
         Gets a list of all storage credentials that have been assigned to the given metastore.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
-        
+
         :returns: Iterator over :class:`StorageCredentialInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         json = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials",
+            headers=headers,
+        )
         parsed = ListAccountStorageCredentialsResponse.from_dict(json).storage_credentials
         return parsed if parsed is not None else []
 
-    def update(self,
-               metastore_id: str,
-               storage_credential_name: str,
-               *,
-               credential_info: Optional[UpdateStorageCredential] = None) -> AccountsStorageCredentialInfo:
+    def update(
+        self,
+        metastore_id: str,
+        storage_credential_name: str,
+        *,
+        credential_info: Optional[UpdateStorageCredential] = None,
+    ) -> AccountsStorageCredentialInfo:
         """Updates a storage credential.
-        
+
         Updates a storage credential on the metastore. The caller must be the owner of the storage credential.
         If the caller is a metastore admin, only the __owner__ field can be changed.
-        
+
         :param metastore_id: str
           Unity Catalog metastore ID
         :param storage_credential_name: str
           Name of the storage credential.
         :param credential_info: :class:`UpdateStorageCredential` (optional)
-        
+
         :returns: :class:`AccountsStorageCredentialInfo`
         """
         body = {}
-        if credential_info is not None: body['credential_info'] = credential_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if credential_info is not None:
+            body["credential_info"] = credential_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PUT',
-            f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}',
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return AccountsStorageCredentialInfo.from_dict(res)
 
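A minimal usage sketch for the account-level storage-credentials service shown above, assuming it is exposed as `AccountClient().storage_credentials` and that account authentication (host, account ID, credentials) is already configured in the environment; the metastore ID and credential name below are placeholders:

    from databricks.sdk import AccountClient

    a = AccountClient()

    metastore_id = "<metastore-id>"  # placeholder

    # List every storage credential assigned to the metastore, then fetch one by name.
    for cred in a.storage_credentials.list(metastore_id=metastore_id):
        print(cred.name)

    info = a.storage_credentials.get(
        metastore_id=metastore_id,
        storage_credential_name="my_credential",  # hypothetical name
    )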
 
 class ArtifactAllowlistsAPI:
     """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so
-    that users can leverage these artifacts on compute configured with shared access mode."""
+    that users can leverage these artifacts on compute configured with shared access mode.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get(self, artifact_type: ArtifactType) -> ArtifactAllowlistInfo:
         """Get an artifact allowlist.
-        
+
         Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
         the **MANAGE ALLOWLIST** privilege on the metastore.
-        
+
         :param artifact_type: :class:`ArtifactType`
           The artifact type of the allowlist.
-        
+
         :returns: :class:`ArtifactAllowlistInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}",
+            headers=headers,
+        )
         return ArtifactAllowlistInfo.from_dict(res)
 
-    def update(self, artifact_type: ArtifactType,
-               artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo:
+    def update(
+        self,
+        artifact_type: ArtifactType,
+        artifact_matchers: List[ArtifactMatcher],
+    ) -> ArtifactAllowlistInfo:
         """Set an artifact allowlist.
-        
+
         Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
         the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
         the metastore.
-        
+
         :param artifact_type: :class:`ArtifactType`
           The artifact type of the allowlist.
         :param artifact_matchers: List[:class:`ArtifactMatcher`]
           A list of allowed artifact match patterns.
-        
+
         :returns: :class:`ArtifactAllowlistInfo`
         """
         body = {}
-        if artifact_matchers is not None: body['artifact_matchers'] = [v.as_dict() for v in artifact_matchers]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if artifact_matchers is not None:
+            body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}",
+            body=body,
+            headers=headers,
+        )
         return ArtifactAllowlistInfo.from_dict(res)
 
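A usage sketch for the allowlist client, assuming it is exposed as `WorkspaceClient().artifact_allowlists`; the enum members and matcher fields follow the generated `catalog` module, but the specific values are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # update() replaces the entire allowlist for the artifact type; it does not append.
    w.artifact_allowlists.update(
        artifact_type=catalog.ArtifactType.LIBRARY_JAR,
        artifact_matchers=[
            catalog.ArtifactMatcher(
                artifact="s3://my-bucket/jars/",  # hypothetical location
                match_type=catalog.MatchType.PREFIX_MATCH,
            )
        ],
    )

    allowlist = w.artifact_allowlists.get(artifact_type=catalog.ArtifactType.LIBRARY_JAR)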
 
 class CatalogsAPI:
     """A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data
     assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission.
-    
+
     In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of
     the workspaces in a Databricks account. Users in different workspaces can share access to the same data,
     depending on privileges granted centrally in Unity Catalog."""
@@ -8856,21 +10682,23 @@ class CatalogsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               connection_name: Optional[str] = None,
-               options: Optional[Dict[str, str]] = None,
-               properties: Optional[Dict[str, str]] = None,
-               provider_name: Optional[str] = None,
-               share_name: Optional[str] = None,
-               storage_root: Optional[str] = None) -> CatalogInfo:
+    def create(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        connection_name: Optional[str] = None,
+        options: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        provider_name: Optional[str] = None,
+        share_name: Optional[str] = None,
+        storage_root: Optional[str] = None,
+    ) -> CatalogInfo:
         """Create a catalog.
-        
+
         Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
         **CREATE_CATALOG** privilege.
-        
+
         :param name: str
           Name of catalog.
         :param comment: str (optional)
@@ -8883,83 +10711,117 @@ def create(self,
           A map of key-value properties attached to the securable.
         :param provider_name: str (optional)
           The name of delta sharing provider.
-          
+
           A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.
         :param share_name: str (optional)
           The name of the share under the share provider.
         :param storage_root: str (optional)
           Storage root URL for managed tables within catalog.
-        
+
         :returns: :class:`CatalogInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if connection_name is not None: body['connection_name'] = connection_name
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if properties is not None: body['properties'] = properties
-        if provider_name is not None: body['provider_name'] = provider_name
-        if share_name is not None: body['share_name'] = share_name
-        if storage_root is not None: body['storage_root'] = storage_root
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/catalogs', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if connection_name is not None:
+            body["connection_name"] = connection_name
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if properties is not None:
+            body["properties"] = properties
+        if provider_name is not None:
+            body["provider_name"] = provider_name
+        if share_name is not None:
+            body["share_name"] = share_name
+        if storage_root is not None:
+            body["storage_root"] = storage_root
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/catalogs",
+            body=body,
+            headers=headers,
+        )
         return CatalogInfo.from_dict(res)
 
     def delete(self, name: str, *, force: Optional[bool] = None):
         """Delete a catalog.
-        
+
         Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner
         of the catalog.
-        
+
         :param name: str
           The name of the catalog.
         :param force: bool (optional)
           Force deletion even if the catalog is not empty.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/catalogs/{name}', query=query, headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/catalogs/{name}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, name: str, *, include_browse: Optional[bool] = None) -> CatalogInfo:
         """Get a catalog.
-        
+
         Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the
         catalog, or a user that has the **USE_CATALOG** privilege set for their account.
-        
+
         :param name: str
           The name of the catalog.
         :param include_browse: bool (optional)
           Whether to include catalogs in the response for which the principal can only access selective
           metadata for
-        
+
         :returns: :class:`CatalogInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/catalogs/{name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/catalogs/{name}",
+            query=query,
+            headers=headers,
+        )
         return CatalogInfo.from_dict(res)
 
-    def list(self,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[CatalogInfo]:
+    def list(
+        self,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[CatalogInfo]:
         """List catalogs.
-        
+
         Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
         retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the
         **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the
         elements in the array.
-        
+
         :param include_browse: bool (optional)
           Whether to include catalogs in the response for which the principal can only access selective
           metadata for
@@ -8973,41 +10835,54 @@ def list(self,
           response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`CatalogInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers)
-            if 'catalogs' in json:
-                for v in json['catalogs']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/catalogs",
+                query=query,
+                headers=headers,
+            )
+            if "catalogs" in json:
+                for v in json["catalogs"]:
                     yield CatalogInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
-               isolation_mode: Optional[CatalogIsolationMode] = None,
-               new_name: Optional[str] = None,
-               options: Optional[Dict[str, str]] = None,
-               owner: Optional[str] = None,
-               properties: Optional[Dict[str, str]] = None) -> CatalogInfo:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
+        isolation_mode: Optional[CatalogIsolationMode] = None,
+        new_name: Optional[str] = None,
+        options: Optional[Dict[str, str]] = None,
+        owner: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+    ) -> CatalogInfo:
         """Update a catalog.
-        
+
         Updates the catalog that matches the supplied name. The caller must be either the owner of the
         catalog, or a metastore admin (when changing the owner field of the catalog).
-        
+
         :param name: str
           The name of the catalog.
         :param comment: str (optional)
@@ -9024,52 +10899,69 @@ def update(self,
           Username of current owner of catalog.
         :param properties: Dict[str,str] (optional)
           A map of key-value properties attached to the securable.
-        
+
         :returns: :class:`CatalogInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
+        if comment is not None:
+            body["comment"] = comment
         if enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = enable_predictive_optimization.value
-        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
-        if new_name is not None: body['new_name'] = new_name
-        if options is not None: body['options'] = options
-        if owner is not None: body['owner'] = owner
-        if properties is not None: body['properties'] = properties
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/catalogs/{name}', body=body, headers=headers)
+            body["enable_predictive_optimization"] = enable_predictive_optimization.value
+        if isolation_mode is not None:
+            body["isolation_mode"] = isolation_mode.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if options is not None:
+            body["options"] = options
+        if owner is not None:
+            body["owner"] = owner
+        if properties is not None:
+            body["properties"] = properties
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/catalogs/{name}",
+            body=body,
+            headers=headers,
+        )
         return CatalogInfo.from_dict(res)
 
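A catalog lifecycle sketch using the client above, assuming it is exposed as `WorkspaceClient().catalogs` and that the workspace is attached to a Unity Catalog metastore; the catalog names are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    created = w.catalogs.create(name="demo_catalog", comment="created from the SDK")
    fetched = w.catalogs.get(name=created.name)
    renamed = w.catalogs.update(name=fetched.name, new_name="demo_catalog_v2")

    # list() hides the next_page_token loop shown above behind a plain iterator.
    for c in w.catalogs.list():
        print(c.name)

    w.catalogs.delete(name=renamed.name, force=True)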
 
 class ConnectionsAPI:
     """Connections allow for creating a connection to an external data source.
-    
+
     A connection is an abstraction of an external data source that can be connected from Databricks Compute.
     Creating a connection object is the first step to managing external data sources within Unity Catalog,
     with the second step being creating a data object (catalog, schema, or table) using the connection. Data
     objects derived from a connection can be written to or read from similar to other Unity Catalog data
     objects based on cloud storage. Users may create different types of connections with each connection
-    having a unique set of configuration options to support credential management and other settings."""
+    having a unique set of configuration options to support credential management and other settings.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               connection_type: ConnectionType,
-               options: Dict[str, str],
-               *,
-               comment: Optional[str] = None,
-               properties: Optional[Dict[str, str]] = None,
-               read_only: Optional[bool] = None) -> ConnectionInfo:
+    def create(
+        self,
+        name: str,
+        connection_type: ConnectionType,
+        options: Dict[str, str],
+        *,
+        comment: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        read_only: Optional[bool] = None,
+    ) -> ConnectionInfo:
         """Create a connection.
-        
+
         Creates a new connection
-        
+
         Creates a new connection to an external data source. It allows users to specify connection details and
         configurations for interaction with the external server.
-        
+
         :param name: str
           Name of the connection.
         :param connection_type: :class:`ConnectionType`
@@ -9082,60 +10974,88 @@ def create(self,
           An object containing map of key-value properties attached to the connection.
         :param read_only: bool (optional)
           If the connection is read only.
-        
+
         :returns: :class:`ConnectionInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if connection_type is not None: body['connection_type'] = connection_type.value
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if properties is not None: body['properties'] = properties
-        if read_only is not None: body['read_only'] = read_only
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if comment is not None:
+            body["comment"] = comment
+        if connection_type is not None:
+            body["connection_type"] = connection_type.value
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if properties is not None:
+            body["properties"] = properties
+        if read_only is not None:
+            body["read_only"] = read_only
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/connections', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/connections",
+            body=body,
+            headers=headers,
+        )
         return ConnectionInfo.from_dict(res)
 
     def delete(self, name: str):
         """Delete a connection.
-        
+
         Deletes the connection that matches the supplied name.
-        
+
         :param name: str
           The name of the connection to be deleted.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/connections/{name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/connections/{name}",
+            headers=headers,
+        )
 
     def get(self, name: str) -> ConnectionInfo:
         """Get a connection.
-        
+
         Gets a connection from its name.

-        
+
         :param name: str
           Name of the connection.
-        
+
         :returns: :class:`ConnectionInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/connections/{name}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/connections/{name}",
+            headers=headers,
+        )
         return ConnectionInfo.from_dict(res)
 
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ConnectionInfo]:
+    def list(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ConnectionInfo]:
         """List connections.
-        
+
         List all connections.
-        
+
         :param max_results: int (optional)
           Maximum number of connections to return. - If not set, all connections are returned (not
           recommended). - when set to a value greater than 0, the page length is the minimum of this value and
@@ -9143,35 +11063,47 @@ def list(self,
           (recommended); - when set to a value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ConnectionInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/connections', query=query, headers=headers)
-            if 'connections' in json:
-                for v in json['connections']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/connections",
+                query=query,
+                headers=headers,
+            )
+            if "connections" in json:
+                for v in json["connections"]:
                     yield ConnectionInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               options: Dict[str, str],
-               *,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None) -> ConnectionInfo:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        name: str,
+        options: Dict[str, str],
+        *,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+    ) -> ConnectionInfo:
         """Update a connection.
-        
+
         Updates the connection that matches the supplied name.
-        
+
         :param name: str
           Name of the connection.
         :param options: Dict[str,str]
@@ -9180,16 +11112,27 @@ def update(self,
           New name for the connection.
         :param owner: str (optional)
           Username of current owner of the connection.
-        
+
         :returns: :class:`ConnectionInfo`
         """
         body = {}
-        if new_name is not None: body['new_name'] = new_name
-        if options is not None: body['options'] = options
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if new_name is not None:
+            body["new_name"] = new_name
+        if options is not None:
+            body["options"] = options
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/connections/{name}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/connections/{name}",
+            body=body,
+            headers=headers,
+        )
         return ConnectionInfo.from_dict(res)
 
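A connection sketch, assuming the client is exposed as `WorkspaceClient().connections`; the connection type and option keys are illustrative and depend on the target system:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    conn = w.connections.create(
        name="pg_conn",
        connection_type=catalog.ConnectionType.POSTGRESQL,
        options={  # passed through as an opaque string map
            "host": "db.example.com",
            "port": "5432",
            "user": "reader",
            "password": "...",  # placeholder
        },
        read_only=True,
    )

    # update() takes the new options map and, optionally, a new name or owner.
    conn = w.connections.update(name=conn.name, options={"host": "db2.example.com"})
    w.connections.delete(name=conn.name)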
 
@@ -9197,7 +11140,7 @@ class CredentialsAPI:
     """A credential represents an authentication and authorization mechanism for accessing services on your cloud
     tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
     groups can access the credential.
-    
+
     To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
     privilege. The user who creates the credential can delegate ownership to another user or group to manage
     permissions on it."""
@@ -9205,25 +11148,27 @@ class CredentialsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create_credential(self,
-                          name: str,
-                          *,
-                          aws_iam_role: Optional[AwsIamRole] = None,
-                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
-                          azure_service_principal: Optional[AzureServicePrincipal] = None,
-                          comment: Optional[str] = None,
-                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
-                          purpose: Optional[CredentialPurpose] = None,
-                          read_only: Optional[bool] = None,
-                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+    def create_credential(
+        self,
+        name: str,
+        *,
+        aws_iam_role: Optional[AwsIamRole] = None,
+        azure_managed_identity: Optional[AzureManagedIdentity] = None,
+        azure_service_principal: Optional[AzureServicePrincipal] = None,
+        comment: Optional[str] = None,
+        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
+        purpose: Optional[CredentialPurpose] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ) -> CredentialInfo:
         """Create a credential.
-        
+
         Creates a new credential. The type of credential to be created is determined by the **purpose** field,
         which should be either **SERVICE** or **STORAGE**.
-        
+
         The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
         storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
-        
+
         :param name: str
           The credential name. The name must be unique among storage and service credentials within the
           metastore.
@@ -9244,111 +11189,149 @@ def create_credential(self,
           **STORAGE**.
         :param skip_validation: bool (optional)
           Optional. Supplying true to this argument skips validation of the created set of credentials.
-        
+
         :returns: :class:`CredentialInfo`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
         if azure_service_principal is not None:
-            body['azure_service_principal'] = azure_service_principal.as_dict()
-        if comment is not None: body['comment'] = comment
+            body["azure_service_principal"] = azure_service_principal.as_dict()
+        if comment is not None:
+            body["comment"] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
-        if name is not None: body['name'] = name
-        if purpose is not None: body['purpose'] = purpose.value
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/credentials', body=body, headers=headers)
+            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
+        if name is not None:
+            body["name"] = name
+        if purpose is not None:
+            body["purpose"] = purpose.value
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/credentials",
+            body=body,
+            headers=headers,
+        )
         return CredentialInfo.from_dict(res)
 
     def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         """Delete a credential.
-        
+
         Deletes a service or storage credential from the metastore. The caller must be an owner of the
         credential.
-        
+
         :param name_arg: str
           Name of the credential.
         :param force: bool (optional)
           Force deletion even if there are dependent services (when purpose is **SERVICE**) or dependent
           external locations and external tables (when purpose is **STORAGE**).
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/credentials/{name_arg}",
+            query=query,
+            headers=headers,
+        )
 
     def generate_temporary_service_credential(
-            self,
-            credential_name: str,
-            *,
-            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
-            gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
+        self,
+        credential_name: str,
+        *,
+        azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
+        gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None,
     ) -> TemporaryCredentials:
         """Generate a temporary service credential.
-        
+
         Returns a set of temporary credentials generated using the specified service credential. The caller
         must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
-        
+
         :param credential_name: str
           The name of the service credential used to generate a temporary credential
         :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
           The Azure cloud options to customize the requested temporary credential
         :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
           The GCP cloud options to customize the requested temporary credential
-        
+
         :returns: :class:`TemporaryCredentials`
         """
         body = {}
-        if azure_options is not None: body['azure_options'] = azure_options.as_dict()
-        if credential_name is not None: body['credential_name'] = credential_name
-        if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if azure_options is not None:
+            body["azure_options"] = azure_options.as_dict()
+        if credential_name is not None:
+            body["credential_name"] = credential_name
+        if gcp_options is not None:
+            body["gcp_options"] = gcp_options.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           '/api/2.1/unity-catalog/temporary-service-credentials',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/temporary-service-credentials",
+            body=body,
+            headers=headers,
+        )
         return TemporaryCredentials.from_dict(res)
 
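A sketch of exchanging a service credential for short-lived cloud credentials, assuming the client is exposed as `WorkspaceClient().credentials`; the credential name is a placeholder, and the response attribute is an assumption about the generated `TemporaryCredentials` dataclass:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    tmp = w.credentials.generate_temporary_service_credential(
        credential_name="my_service_credential",  # hypothetical
    )
    # expiration_time is an assumed field on the generated response dataclass.
    print(tmp.expiration_time)
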
     def get_credential(self, name_arg: str) -> CredentialInfo:
         """Get a credential.
-        
+
         Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
         owner of the credential, or have any permission on the credential.
-        
+
         :param name_arg: str
           Name of the credential.
-        
+
         :returns: :class:`CredentialInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/credentials/{name_arg}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/credentials/{name_arg}",
+            headers=headers,
+        )
         return CredentialInfo.from_dict(res)
 
-    def list_credentials(self,
-                         *,
-                         max_results: Optional[int] = None,
-                         page_token: Optional[str] = None,
-                         purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]:
+    def list_credentials(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        purpose: Optional[CredentialPurpose] = None,
+    ) -> Iterator[CredentialInfo]:
         """List credentials.
-        
+
         Gets an array of credentials (as __CredentialInfo__ objects).
-        
+
         The array is limited to only the credentials that the caller has permission to access. If the caller
         is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
         ordering of the elements in the array.
-        
+
         :param max_results: int (optional)
           Maximum number of credentials to return. - If not set, the default max page size is used. - When set
           to a value greater than 0, the page length is the minimum of this value and a server-configured
@@ -9358,46 +11341,58 @@ def list_credentials(self,
           Opaque token to retrieve the next page of results.
         :param purpose: :class:`CredentialPurpose` (optional)
           Return only credentials for the specified purpose.
-        
+
         :returns: Iterator over :class:`CredentialInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if purpose is not None: query['purpose'] = purpose.value
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if purpose is not None:
+            query["purpose"] = purpose.value
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/credentials', query=query, headers=headers)
-            if 'credentials' in json:
-                for v in json['credentials']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/credentials",
+                query=query,
+                headers=headers,
+            )
+            if "credentials" in json:
+                for v in json["credentials"]:
                     yield CredentialInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update_credential(self,
-                          name_arg: str,
-                          *,
-                          aws_iam_role: Optional[AwsIamRole] = None,
-                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
-                          azure_service_principal: Optional[AzureServicePrincipal] = None,
-                          comment: Optional[str] = None,
-                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
-                          force: Optional[bool] = None,
-                          isolation_mode: Optional[IsolationMode] = None,
-                          new_name: Optional[str] = None,
-                          owner: Optional[str] = None,
-                          read_only: Optional[bool] = None,
-                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+            query["page_token"] = json["next_page_token"]
+
+    def update_credential(
+        self,
+        name_arg: str,
+        *,
+        aws_iam_role: Optional[AwsIamRole] = None,
+        azure_managed_identity: Optional[AzureManagedIdentity] = None,
+        azure_service_principal: Optional[AzureServicePrincipal] = None,
+        comment: Optional[str] = None,
+        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
+        force: Optional[bool] = None,
+        isolation_mode: Optional[IsolationMode] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ) -> CredentialInfo:
         """Update a credential.
-        
+
         Updates a service or storage credential on the metastore.
-        
+
         The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
         If the caller is a metastore admin, only the __owner__ field can be changed.
-        
+
         :param name_arg: str
           Name of the credential.
         :param aws_iam_role: :class:`AwsIamRole` (optional)
@@ -9424,57 +11419,72 @@ def update_credential(self,
           **STORAGE**.
         :param skip_validation: bool (optional)
           Supply true to this argument to skip validation of the updated credential.
-        
+
         :returns: :class:`CredentialInfo`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
         if azure_service_principal is not None:
-            body['azure_service_principal'] = azure_service_principal.as_dict()
-        if comment is not None: body['comment'] = comment
+            body["azure_service_principal"] = azure_service_principal.as_dict()
+        if comment is not None:
+            body["comment"] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
-        if force is not None: body['force'] = force
-        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/credentials/{name_arg}',
-                           body=body,
-                           headers=headers)
+            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
+        if force is not None:
+            body["force"] = force
+        if isolation_mode is not None:
+            body["isolation_mode"] = isolation_mode.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/credentials/{name_arg}",
+            body=body,
+            headers=headers,
+        )
         return CredentialInfo.from_dict(res)
 
-    def validate_credential(self,
-                            *,
-                            aws_iam_role: Optional[AwsIamRole] = None,
-                            azure_managed_identity: Optional[AzureManagedIdentity] = None,
-                            credential_name: Optional[str] = None,
-                            external_location_name: Optional[str] = None,
-                            purpose: Optional[CredentialPurpose] = None,
-                            read_only: Optional[bool] = None,
-                            url: Optional[str] = None) -> ValidateCredentialResponse:
+    def validate_credential(
+        self,
+        *,
+        aws_iam_role: Optional[AwsIamRole] = None,
+        azure_managed_identity: Optional[AzureManagedIdentity] = None,
+        credential_name: Optional[str] = None,
+        external_location_name: Optional[str] = None,
+        purpose: Optional[CredentialPurpose] = None,
+        read_only: Optional[bool] = None,
+        url: Optional[str] = None,
+    ) -> ValidateCredentialResponse:
         """Validate a credential.
-        
+
         Validates a credential.
-        
+
         For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
         credential must be provided.
-        
+
         For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
         __url__ needs to be provided. If only one of them is provided, it will be used for validation. And if
         both are provided, the __url__ will be used for validation, and __external_location_name__ will be
         ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
         credential must be provided.
-        
+
         The caller must be a metastore admin or the credential owner or have the required permission on the
         metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
-        
+
         :param aws_iam_role: :class:`AwsIamRole` (optional)
           The AWS IAM role configuration
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
@@ -9491,21 +11501,35 @@ def validate_credential(self,
           (purpose is **STORAGE**.)
         :param url: str (optional)
           The external location url to validate. Only applicable when purpose is **STORAGE**.
-        
+
         :returns: :class:`ValidateCredentialResponse`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
-        if credential_name is not None: body['credential_name'] = credential_name
-        if external_location_name is not None: body['external_location_name'] = external_location_name
-        if purpose is not None: body['purpose'] = purpose.value
-        if read_only is not None: body['read_only'] = read_only
-        if url is not None: body['url'] = url
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers)
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
+        if credential_name is not None:
+            body["credential_name"] = credential_name
+        if external_location_name is not None:
+            body["external_location_name"] = external_location_name
+        if purpose is not None:
+            body["purpose"] = purpose.value
+        if read_only is not None:
+            body["read_only"] = read_only
+        if url is not None:
+            body["url"] = url
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/validate-credentials",
+            body=body,
+            headers=headers,
+        )
         return ValidateCredentialResponse.from_dict(res)
 
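A service-credential lifecycle sketch, assuming `WorkspaceClient().credentials` and an AWS-backed credential; the role ARN and names are placeholders, and the dataclass field names follow the generated `catalog` module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    cred = w.credentials.create_credential(
        name="orders_service_cred",
        purpose=catalog.CredentialPurpose.SERVICE,
        aws_iam_role=catalog.AwsIamRole(role_arn="arn:aws:iam::123456789012:role/demo"),
    )
    check = w.credentials.validate_credential(
        credential_name=cred.name,
        purpose=catalog.CredentialPurpose.SERVICE,
    )
    w.credentials.delete_credential(name_arg=cred.name, force=True)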
 
@@ -9515,32 +11539,34 @@ class ExternalLocationsAPI:
     access-control policies that control which users and groups can access the credential. If a user does not
     have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt
     to authenticate to your cloud tenant on the user’s behalf.
-    
+
     Databricks recommends using external locations rather than using storage credentials directly.
-    
+
     To create external locations, you must be a metastore admin or a user with the
     **CREATE_EXTERNAL_LOCATION** privilege."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               url: str,
-               credential_name: str,
-               *,
-               access_point: Optional[str] = None,
-               comment: Optional[str] = None,
-               encryption_details: Optional[EncryptionDetails] = None,
-               fallback: Optional[bool] = None,
-               read_only: Optional[bool] = None,
-               skip_validation: Optional[bool] = None) -> ExternalLocationInfo:
+    def create(
+        self,
+        name: str,
+        url: str,
+        credential_name: str,
+        *,
+        access_point: Optional[str] = None,
+        comment: Optional[str] = None,
+        encryption_details: Optional[EncryptionDetails] = None,
+        fallback: Optional[bool] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ) -> ExternalLocationInfo:
         """Create an external location.
-        
+
         Creates a new external location entry in the metastore. The caller must be a metastore admin or have
         the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
         credential.
-        
+
         :param name: str
           Name of the external location.
         :param url: str
@@ -9561,83 +11587,112 @@ def create(self,
           Indicates whether the external location is read-only.
         :param skip_validation: bool (optional)
           Skips validation of the storage credential associated with the external location.
-        
+
         :returns: :class:`ExternalLocationInfo`
         """
         body = {}
-        if access_point is not None: body['access_point'] = access_point
-        if comment is not None: body['comment'] = comment
-        if credential_name is not None: body['credential_name'] = credential_name
-        if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict()
-        if fallback is not None: body['fallback'] = fallback
-        if name is not None: body['name'] = name
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        if url is not None: body['url'] = url
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/external-locations', body=body, headers=headers)
+        if access_point is not None:
+            body["access_point"] = access_point
+        if comment is not None:
+            body["comment"] = comment
+        if credential_name is not None:
+            body["credential_name"] = credential_name
+        if encryption_details is not None:
+            body["encryption_details"] = encryption_details.as_dict()
+        if fallback is not None:
+            body["fallback"] = fallback
+        if name is not None:
+            body["name"] = name
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        if url is not None:
+            body["url"] = url
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/external-locations",
+            body=body,
+            headers=headers,
+        )
         return ExternalLocationInfo.from_dict(res)
 
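A sketch of registering an external location with the method above, assuming `WorkspaceClient().external_locations` and an existing storage credential; the bucket path and credential name are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    loc = w.external_locations.create(
        name="landing_zone",
        url="s3://my-bucket/landing/",
        credential_name="my_storage_credential",
        comment="raw file drop-off",
    )
    print(loc.url, loc.credential_name)
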
     def delete(self, name: str, *, force: Optional[bool] = None):
         """Delete an external location.
-        
+
         Deletes the specified external location from the metastore. The caller must be the owner of the
         external location.
-        
+
         :param name: str
           Name of the external location.
         :param force: bool (optional)
           Force deletion even if there are dependent external tables or mounts.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/external-locations/{name}',
-                     query=query,
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/external-locations/{name}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, name: str, *, include_browse: Optional[bool] = None) -> ExternalLocationInfo:
         """Get an external location.
-        
+
         Gets an external location from the metastore. The caller must be either a metastore admin, the owner
         of the external location, or a user that has some privilege on the external location.
-        
+
         :param name: str
           Name of the external location.
         :param include_browse: bool (optional)
           Whether to include external locations in the response for which the principal can only access
           selective metadata for
-        
+
         :returns: :class:`ExternalLocationInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/external-locations/{name}',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/external-locations/{name}",
+            query=query,
+            headers=headers,
+        )
         return ExternalLocationInfo.from_dict(res)
 
-    def list(self,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ExternalLocationInfo]:
+    def list(
+        self,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ExternalLocationInfo]:
         """List external locations.
-        
+
         Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
         must be a metastore admin, the owner of the external location, or a user that has some privilege on
         the external location. There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param include_browse: bool (optional)
           Whether to include external locations in the response for which the principal can only access
           selective metadata for
@@ -9648,50 +11703,60 @@ def list(self,
           value (recommended); - when set to a value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ExternalLocationInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/unity-catalog/external-locations',
-                                query=query,
-                                headers=headers)
-            if 'external_locations' in json:
-                for v in json['external_locations']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/external-locations",
+                query=query,
+                headers=headers,
+            )
+            if "external_locations" in json:
+                for v in json["external_locations"]:
                     yield ExternalLocationInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               access_point: Optional[str] = None,
-               comment: Optional[str] = None,
-               credential_name: Optional[str] = None,
-               encryption_details: Optional[EncryptionDetails] = None,
-               fallback: Optional[bool] = None,
-               force: Optional[bool] = None,
-               isolation_mode: Optional[IsolationMode] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               read_only: Optional[bool] = None,
-               skip_validation: Optional[bool] = None,
-               url: Optional[str] = None) -> ExternalLocationInfo:
+            query["page_token"] = json["next_page_token"]
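
A hedged sketch of how the paginated list call above is typically consumed: the iterator follows `next_page_token` internally, so callers only loop. The filter values are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# The generator re-requests pages as needed; no manual page_token handling required.
for location in w.external_locations.list(include_browse=True, max_results=50):
    print(location.name, location.url)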
+
+    def update(
+        self,
+        name: str,
+        *,
+        access_point: Optional[str] = None,
+        comment: Optional[str] = None,
+        credential_name: Optional[str] = None,
+        encryption_details: Optional[EncryptionDetails] = None,
+        fallback: Optional[bool] = None,
+        force: Optional[bool] = None,
+        isolation_mode: Optional[IsolationMode] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+        url: Optional[str] = None,
+    ) -> ExternalLocationInfo:
         """Update an external location.
-        
+
         Updates an external location in the metastore. The caller must be the owner of the external location,
         or be a metastore admin. In the second case, the admin can only update the name of the external
         location.
-        
+
         :param name: str
           Name of the external location.
         :param access_point: str (optional)
@@ -9719,130 +11784,175 @@ def update(self,
           Skips validation of the storage credential associated with the external location.
         :param url: str (optional)
           Path URL of the external location.
-        
+
         :returns: :class:`ExternalLocationInfo`
         """
         body = {}
-        if access_point is not None: body['access_point'] = access_point
-        if comment is not None: body['comment'] = comment
-        if credential_name is not None: body['credential_name'] = credential_name
-        if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict()
-        if fallback is not None: body['fallback'] = fallback
-        if force is not None: body['force'] = force
-        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        if url is not None: body['url'] = url
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/external-locations/{name}',
-                           body=body,
-                           headers=headers)
+        if access_point is not None:
+            body["access_point"] = access_point
+        if comment is not None:
+            body["comment"] = comment
+        if credential_name is not None:
+            body["credential_name"] = credential_name
+        if encryption_details is not None:
+            body["encryption_details"] = encryption_details.as_dict()
+        if fallback is not None:
+            body["fallback"] = fallback
+        if force is not None:
+            body["force"] = force
+        if isolation_mode is not None:
+            body["isolation_mode"] = isolation_mode.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        if url is not None:
+            body["url"] = url
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/external-locations/{name}",
+            body=body,
+            headers=headers,
+        )
         return ExternalLocationInfo.from_dict(res)
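
A minimal sketch of a partial update using the method above; only arguments that are not None end up in the PATCH body. The name and comment are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Unspecified fields are left untouched on the server side.
updated = w.external_locations.update(
    name="my_external_location",
    comment="rotated storage credential",
    skip_validation=True,
)
print(updated.url)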
 
 
 class FunctionsAPI:
     """Functions implement User-Defined Functions (UDFs) in Unity Catalog.
-    
+
     The function implementation can be any SQL expression or Query, and it can be invoked wherever a table
     reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it
-    can be referenced with the form __catalog_name__.__schema_name__.__function_name__."""
+    can be referenced with the form __catalog_name__.__schema_name__.__function_name__.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(self, function_info: CreateFunction) -> FunctionInfo:
         """Create a function.
-        
+
         **WARNING: This API is experimental and will change in future versions**
-        
+
         Creates a new function
-        
+
         The user must have the following permissions in order for the function to be created: -
         **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
         function's parent schema
-        
+
         :param function_info: :class:`CreateFunction`
           Partial __FunctionInfo__ specifying the function to be created.
-        
+
         :returns: :class:`FunctionInfo`
         """
         body = {}
-        if function_info is not None: body['function_info'] = function_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if function_info is not None:
+            body["function_info"] = function_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/functions', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/functions",
+            body=body,
+            headers=headers,
+        )
         return FunctionInfo.from_dict(res)
 
     def delete(self, name: str, *, force: Optional[bool] = None):
         """Delete a function.
-        
+
         Deletes the function that matches the supplied name. For the deletion to succeed, the user must
         satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the
         owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog -
         Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog
         and the **USE_SCHEMA** privilege on its parent schema
-        
+
         :param name: str
           The fully-qualified name of the function (of the form
           __catalog_name__.__schema_name__.__function__name__).
         :param force: bool (optional)
           Force deletion even if the function is not empty.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/functions/{name}', query=query, headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/functions/{name}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, name: str, *, include_browse: Optional[bool] = None) -> FunctionInfo:
         """Get a function.
-        
+
         Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must
         satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's
         parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner
         of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the
         **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the
         function itself
-        
+
         :param name: str
           The fully-qualified name of the function (of the form
           __catalog_name__.__schema_name__.__function__name__).
         :param include_browse: bool (optional)
           Whether to include functions in the response for which the principal can only access selective
           metadata for
-        
+
         :returns: :class:`FunctionInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/functions/{name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/functions/{name}",
+            query=query,
+            headers=headers,
+        )
         return FunctionInfo.from_dict(res)
 
-    def list(self,
-             catalog_name: str,
-             schema_name: str,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[FunctionInfo]:
+    def list(
+        self,
+        catalog_name: str,
+        schema_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[FunctionInfo]:
         """List functions.
-        
+
         List functions within the specified parent catalog and schema. If the user is a metastore admin, all
         functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege
         on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only
         functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is
         no guarantee of a specific ordering of the elements in the array.
-        
+
         :param catalog_name: str
           Name of parent catalog for functions of interest.
         :param schema_name: str
@@ -9857,50 +11967,71 @@ def list(self,
           (recommended); - when set to a value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`FunctionInfo`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if schema_name is not None: query['schema_name'] = schema_name
-        headers = {'Accept': 'application/json', }
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if schema_name is not None:
+            query["schema_name"] = schema_name
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/functions', query=query, headers=headers)
-            if 'functions' in json:
-                for v in json['functions']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/functions",
+                query=query,
+                headers=headers,
+            )
+            if "functions" in json:
+                for v in json["functions"]:
                     yield FunctionInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
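
An illustrative sketch of listing functions with the iterator above, assuming a configured client; the catalog and schema names are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Only functions the caller can EXECUTE (or owns) are returned for non-admins.
for fn in w.functions.list(catalog_name="main", schema_name="default"):
    print(fn.full_name)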
 
     def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo:
         """Update a function.
-        
+
         Updates the function that matches the supplied name. Only the owner of the function can be updated. If
         the user is not a metastore admin, the user must be a member of the group that is the new function
         owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the
         function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of
         the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the
         **USE_SCHEMA** privilege on the function's parent schema.
-        
+
         :param name: str
           The fully-qualified name of the function (of the form
           __catalog_name__.__schema_name__.__function__name__).
         :param owner: str (optional)
           Username of current owner of function.
-        
+
         :returns: :class:`FunctionInfo`
         """
         body = {}
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/functions/{name}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/functions/{name}",
+            body=body,
+            headers=headers,
+        )
         return FunctionInfo.from_dict(res)
 
 
@@ -9909,7 +12040,7 @@ class GrantsAPI:
     Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or
     schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are
     inherited downward.
-    
+
     Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that
     granting a privilege on the catalog automatically grants the privilege to all current and future objects
     within the catalog. Similarly, privileges granted on a schema are inherited by all current and future
@@ -9918,44 +12049,53 @@ class GrantsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def get(self,
-            securable_type: SecurableType,
-            full_name: str,
-            *,
-            principal: Optional[str] = None) -> PermissionsList:
+    def get(
+        self,
+        securable_type: SecurableType,
+        full_name: str,
+        *,
+        principal: Optional[str] = None,
+    ) -> PermissionsList:
         """Get permissions.
-        
+
         Gets the permissions for a securable.
-        
+
         :param securable_type: :class:`SecurableType`
           Type of securable.
         :param full_name: str
           Full name of securable.
         :param principal: str (optional)
           If provided, only the permissions for the specified principal (user or group) are returned.
-        
+
         :returns: :class:`PermissionsList`
         """
 
         query = {}
-        if principal is not None: query['principal'] = principal
-        headers = {'Accept': 'application/json', }
+        if principal is not None:
+            query["principal"] = principal
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}",
+            query=query,
+            headers=headers,
+        )
         return PermissionsList.from_dict(res)
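
A hedged sketch of reading grants with the call above; the table name and principal are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()
grants = w.grants.get(
    securable_type=catalog.SecurableType.TABLE,
    full_name="main.default.my_table",
    principal="data-engineers",
)
# Each assignment pairs a principal with its privileges on the securable.
for assignment in grants.privilege_assignments or []:
    print(assignment.principal, assignment.privileges)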
 
-    def get_effective(self,
-                      securable_type: SecurableType,
-                      full_name: str,
-                      *,
-                      principal: Optional[str] = None) -> EffectivePermissionsList:
+    def get_effective(
+        self,
+        securable_type: SecurableType,
+        full_name: str,
+        *,
+        principal: Optional[str] = None,
+    ) -> EffectivePermissionsList:
         """Get effective permissions.
-        
+
         Gets the effective permissions for a securable.
-        
+
         :param securable_type: :class:`SecurableType`
           Type of securable.
         :param full_name: str
@@ -9963,46 +12103,59 @@ def get_effective(self,
         :param principal: str (optional)
           If provided, only the effective permissions for the specified principal (user or group) are
           returned.
-        
+
         :returns: :class:`EffectivePermissionsList`
         """
 
         query = {}
-        if principal is not None: query['principal'] = principal
-        headers = {'Accept': 'application/json', }
+        if principal is not None:
+            query["principal"] = principal
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/effective-permissions/{securable_type.value}/{full_name}',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/effective-permissions/{securable_type.value}/{full_name}",
+            query=query,
+            headers=headers,
+        )
         return EffectivePermissionsList.from_dict(res)
 
-    def update(self,
-               securable_type: SecurableType,
-               full_name: str,
-               *,
-               changes: Optional[List[PermissionsChange]] = None) -> PermissionsList:
+    def update(
+        self,
+        securable_type: SecurableType,
+        full_name: str,
+        *,
+        changes: Optional[List[PermissionsChange]] = None,
+    ) -> PermissionsList:
         """Update permissions.
-        
+
         Updates the permissions for a securable.
-        
+
         :param securable_type: :class:`SecurableType`
           Type of securable.
         :param full_name: str
           Full name of securable.
         :param changes: List[:class:`PermissionsChange`] (optional)
           Array of permissions change objects.
-        
+
         :returns: :class:`PermissionsList`
         """
         body = {}
-        if changes is not None: body['changes'] = [v.as_dict() for v in changes]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if changes is not None:
+            body["changes"] = [v.as_dict() for v in changes]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}",
+            body=body,
+            headers=headers,
+        )
         return PermissionsList.from_dict(res)
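
A minimal sketch of granting a privilege with the update call above; the principal, table, and privilege are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()
# Grant SELECT on a table to a group; `remove=[...]` would revoke instead.
w.grants.update(
    securable_type=catalog.SecurableType.TABLE,
    full_name="main.default.my_table",
    changes=[
        catalog.PermissionsChange(
            principal="data-engineers",
            add=[catalog.Privilege.SELECT],
        )
    ],
)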
 
 
@@ -10011,10 +12164,10 @@ class MetastoresAPI:
     views) and the permissions that govern access to them. Databricks account admins can create metastores and
     assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use
     Unity Catalog, it must have a Unity Catalog metastore attached.
-    
+
     Each metastore is configured with a root storage location in a cloud storage account. This storage
     location is used for metadata and managed tables data.
-    
+
     NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity
     Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is
     available in a catalog named hive_metastore."""
@@ -10024,11 +12177,11 @@ def __init__(self, api_client):
 
     def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str):
         """Create an assignment.
-        
+
         Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
         overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
         admin.
-        
+
         :param workspace_id: int
           A workspace ID.
         :param metastore_id: str
@@ -10036,31 +12189,40 @@ def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str
         :param default_catalog_name: str
           The name of the default catalog in the metastore. This field is deprecated. Please use "Default
           Namespace API" to configure the default catalog for a Databricks workspace.
-        
-        
+
+
         """
         body = {}
-        if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name
-        if metastore_id is not None: body['metastore_id'] = metastore_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PUT',
-                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
-                     body=body,
-                     headers=headers)
+        if default_catalog_name is not None:
+            body["default_catalog_name"] = default_catalog_name
+        if metastore_id is not None:
+            body["metastore_id"] = metastore_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-    def create(self,
-               name: str,
-               *,
-               region: Optional[str] = None,
-               storage_root: Optional[str] = None) -> MetastoreInfo:
+        self._api.do(
+            "PUT",
+            f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore",
+            body=body,
+            headers=headers,
+        )
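
An illustrative sketch of the assignment call above (which requires account admin rights); the workspace ID, metastore ID, and catalog name are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Overwrites any existing assignment for the same workspace ID.
w.metastores.assign(
    workspace_id=1234567890123456,
    metastore_id="11111111-2222-3333-4444-555555555555",
    default_catalog_name="main",
)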
+
+    def create(
+        self,
+        name: str,
+        *,
+        region: Optional[str] = None,
+        storage_root: Optional[str] = None,
+    ) -> MetastoreInfo:
         """Create a metastore.
-        
+
         Creates a new metastore based on a provided name and optional storage root path. By default (if the
         __owner__ field is not set), the owner of the new metastore is the user calling the
         __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is
         assigned to the System User instead.
-        
+
         :param name: str
           The user-specified name of the metastore.
         :param region: str (optional)
@@ -10069,133 +12231,171 @@ def create(self,
           the region of the workspace receiving the request will be used.
         :param storage_root: str (optional)
           The storage root URL for metastore
-        
+
         :returns: :class:`MetastoreInfo`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if region is not None: body['region'] = region
-        if storage_root is not None: body['storage_root'] = storage_root
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if name is not None:
+            body["name"] = name
+        if region is not None:
+            body["region"] = region
+        if storage_root is not None:
+            body["storage_root"] = storage_root
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/metastores', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/metastores",
+            body=body,
+            headers=headers,
+        )
         return MetastoreInfo.from_dict(res)
 
     def current(self) -> MetastoreAssignment:
         """Get metastore assignment for workspace.
-        
+
         Gets the metastore assignment for the workspace being accessed.
-        
+
         :returns: :class:`MetastoreAssignment`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/unity-catalog/current-metastore-assignment', headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.1/unity-catalog/current-metastore-assignment",
+            headers=headers,
+        )
         return MetastoreAssignment.from_dict(res)
 
     def delete(self, id: str, *, force: Optional[bool] = None):
         """Delete a metastore.
-        
+
         Deletes a metastore. The caller must be a metastore admin.
-        
+
         :param id: str
           Unique ID of the metastore.
         :param force: bool (optional)
           Force deletion even if the metastore is not empty. Default is false.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/metastores/{id}', query=query, headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/metastores/{id}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, id: str) -> MetastoreInfo:
         """Get a metastore.
-        
+
         Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this
         info.
-        
+
         :param id: str
           Unique ID of the metastore.
-        
+
         :returns: :class:`MetastoreInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/metastores/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.1/unity-catalog/metastores/{id}", headers=headers)
         return MetastoreInfo.from_dict(res)
 
     def list(self) -> Iterator[MetastoreInfo]:
         """List metastores.
-        
+
         Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin
         to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :returns: Iterator over :class:`MetastoreInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.1/unity-catalog/metastores', headers=headers)
+        json = self._api.do("GET", "/api/2.1/unity-catalog/metastores", headers=headers)
         parsed = ListMetastoresResponse.from_dict(json).metastores
         return parsed if parsed is not None else []
 
     def summary(self) -> GetMetastoreSummaryResponse:
         """Get a metastore summary.
-        
+
         Gets information about a metastore. This summary includes the storage credential, the cloud vendor,
         the cloud region, and the global metastore ID.
-        
+
         :returns: :class:`GetMetastoreSummaryResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/unity-catalog/metastore_summary', headers=headers)
+        res = self._api.do("GET", "/api/2.1/unity-catalog/metastore_summary", headers=headers)
         return GetMetastoreSummaryResponse.from_dict(res)
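
A small sketch combining the two read-only calls above to inspect the metastore attached to the current workspace, assuming a configured client; the printed fields are typical summary attributes and may vary by SDK version.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
assignment = w.metastores.current()
summary = w.metastores.summary()
print(assignment.metastore_id, summary.cloud, summary.region)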
 
     def unassign(self, workspace_id: int, metastore_id: str):
         """Delete an assignment.
-        
+
         Deletes a metastore assignment. The caller must be an account administrator.
-        
+
         :param workspace_id: int
           A workspace ID.
         :param metastore_id: str
           Query for the ID of the metastore to delete.
-        
-        
+
+
         """
 
         query = {}
-        if metastore_id is not None: query['metastore_id'] = metastore_id
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
-                     query=query,
-                     headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               delta_sharing_organization_name: Optional[str] = None,
-               delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None,
-               delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               privilege_model_version: Optional[str] = None,
-               storage_root_credential_id: Optional[str] = None) -> MetastoreInfo:
+        if metastore_id is not None:
+            query["metastore_id"] = metastore_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore",
+            query=query,
+            headers=headers,
+        )
+
+    def update(
+        self,
+        id: str,
+        *,
+        delta_sharing_organization_name: Optional[str] = None,
+        delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None,
+        delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        privilege_model_version: Optional[str] = None,
+        storage_root_credential_id: Optional[str] = None,
+    ) -> MetastoreInfo:
         """Update a metastore.
-        
+
         Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__
         field is set to the empty string (**""**), the ownership is updated to the System User.
-        
+
         :param id: str
           Unique ID of the metastore.
         :param delta_sharing_organization_name: str (optional)
@@ -10213,38 +12413,53 @@ def update(self,
           Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).
         :param storage_root_credential_id: str (optional)
           UUID of storage credential to access the metastore storage_root.
-        
+
         :returns: :class:`MetastoreInfo`
         """
         body = {}
         if delta_sharing_organization_name is not None:
-            body['delta_sharing_organization_name'] = delta_sharing_organization_name
+            body["delta_sharing_organization_name"] = delta_sharing_organization_name
         if delta_sharing_recipient_token_lifetime_in_seconds is not None:
-            body[
-                'delta_sharing_recipient_token_lifetime_in_seconds'] = delta_sharing_recipient_token_lifetime_in_seconds
-        if delta_sharing_scope is not None: body['delta_sharing_scope'] = delta_sharing_scope.value
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if privilege_model_version is not None: body['privilege_model_version'] = privilege_model_version
+            body["delta_sharing_recipient_token_lifetime_in_seconds"] = (
+                delta_sharing_recipient_token_lifetime_in_seconds
+            )
+        if delta_sharing_scope is not None:
+            body["delta_sharing_scope"] = delta_sharing_scope.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if privilege_model_version is not None:
+            body["privilege_model_version"] = privilege_model_version
         if storage_root_credential_id is not None:
-            body['storage_root_credential_id'] = storage_root_credential_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["storage_root_credential_id"] = storage_root_credential_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/metastores/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/metastores/{id}",
+            body=body,
+            headers=headers,
+        )
         return MetastoreInfo.from_dict(res)
 
-    def update_assignment(self,
-                          workspace_id: int,
-                          *,
-                          default_catalog_name: Optional[str] = None,
-                          metastore_id: Optional[str] = None):
+    def update_assignment(
+        self,
+        workspace_id: int,
+        *,
+        default_catalog_name: Optional[str] = None,
+        metastore_id: Optional[str] = None,
+    ):
         """Update an assignment.
-        
+
         Updates a metastore assignment. This operation can be used to update __metastore_id__ or
         __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore.
         The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a
         Workspace admin.
-        
+
         :param workspace_id: int
           A workspace ID.
         :param default_catalog_name: str (optional)
@@ -10252,69 +12467,81 @@ def update_assignment(self,
           Namespace API" to configure the default catalog for a Databricks workspace.
         :param metastore_id: str (optional)
           The unique ID of the metastore.
-        
-        
+
+
         """
         body = {}
-        if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name
-        if metastore_id is not None: body['metastore_id'] = metastore_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if default_catalog_name is not None:
+            body["default_catalog_name"] = default_catalog_name
+        if metastore_id is not None:
+            body["metastore_id"] = metastore_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH',
-                     f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore",
+            body=body,
+            headers=headers,
+        )
 
 
 class ModelVersionsAPI:
     """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
     provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
     workspaces.
-    
+
     This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more
-    details, see the [registered models API docs](/api/workspace/registeredmodels)."""
+    details, see the [registered models API docs](/api/workspace/registeredmodels).
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def delete(self, full_name: str, version: int):
         """Delete a Model Version.
-        
+
         Deletes a model version from the specified registered model. Any aliases assigned to the model version
         will also be deleted.
-        
+
         The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
         the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the model version
         :param version: int
           The integer version number of the model version
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
-                     headers=headers)
-
-    def get(self,
-            full_name: str,
-            version: int,
-            *,
-            include_aliases: Optional[bool] = None,
-            include_browse: Optional[bool] = None) -> ModelVersionInfo:
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}",
+            headers=headers,
+        )
+
+    def get(
+        self,
+        full_name: str,
+        version: int,
+        *,
+        include_aliases: Optional[bool] = None,
+        include_browse: Optional[bool] = None,
+    ) -> ModelVersionInfo:
         """Get a Model Version.
-        
+
         Get a model version.
-        
+
         The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent
         registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
         privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the model version
         :param version: int
@@ -10324,74 +12551,89 @@ def get(self,
         :param include_browse: bool (optional)
           Whether to include model versions in the response for which the principal can only access selective
           metadata for
-        
+
         :returns: :class:`ModelVersionInfo`
         """
 
         query = {}
-        if include_aliases is not None: query['include_aliases'] = include_aliases
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
-                           query=query,
-                           headers=headers)
+        if include_aliases is not None:
+            query["include_aliases"] = include_aliases
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}",
+            query=query,
+            headers=headers,
+        )
         return ModelVersionInfo.from_dict(res)
 
-    def get_by_alias(self,
-                     full_name: str,
-                     alias: str,
-                     *,
-                     include_aliases: Optional[bool] = None) -> ModelVersionInfo:
+    def get_by_alias(
+        self,
+        full_name: str,
+        alias: str,
+        *,
+        include_aliases: Optional[bool] = None,
+    ) -> ModelVersionInfo:
         """Get Model Version By Alias.
-        
+
         Get a model version by alias.
-        
+
         The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
         registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
         privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the registered model
         :param alias: str
           The name of the alias
         :param include_aliases: bool (optional)
           Whether to include aliases associated with the model version in the response
-        
+
         :returns: :class:`ModelVersionInfo`
         """
 
         query = {}
-        if include_aliases is not None: query['include_aliases'] = include_aliases
-        headers = {'Accept': 'application/json', }
+        if include_aliases is not None:
+            query["include_aliases"] = include_aliases
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}",
+            query=query,
+            headers=headers,
+        )
         return ModelVersionInfo.from_dict(res)
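
An illustrative sketch of resolving a model version by alias with the call above; the model name and alias are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
mv = w.model_versions.get_by_alias(full_name="main.default.my_model", alias="champion")
print(mv.version, mv.status)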
 
-    def list(self,
-             full_name: str,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ModelVersionInfo]:
+    def list(
+        self,
+        full_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ModelVersionInfo]:
         """List Model Versions.
-        
+
         List model versions. You can list model versions under a particular schema, or list all model versions
         in the current metastore.
-        
+
         The returned models are filtered based on the privileges of the calling user. For example, the
         metastore admin is able to list all the model versions. A regular user needs to be the owner or have
         the **EXECUTE** privilege on the parent registered model to receive the model versions in the
         response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
         on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         There is no guarantee of a specific ordering of the elements in the response. The elements in the
         response will not contain any aliases or tags.
-        
+
         :param full_name: str
           The full three-level name of the registered model under which to list model versions
         :param include_browse: bool (optional)
@@ -10405,56 +12647,69 @@ def list(self,
           value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ModelVersionInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/unity-catalog/models/{full_name}/versions',
-                                query=query,
-                                headers=headers)
-            if 'model_versions' in json:
-                for v in json['model_versions']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/unity-catalog/models/{full_name}/versions",
+                query=query,
+                headers=headers,
+            )
+            if "model_versions" in json:
+                for v in json["model_versions"]:
                     yield ModelVersionInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo:
         """Update a Model Version.
-        
+
         Updates the specified model version.
-        
+
         The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
         the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         Currently only the comment of the model version can be updated.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the model version
         :param version: int
           The integer version number of the model version
         :param comment: str (optional)
           The comment attached to the model version
-        
+
         :returns: :class:`ModelVersionInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if comment is not None:
+            body["comment"] = comment
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}",
+            body=body,
+            headers=headers,
+        )
         return ModelVersionInfo.from_dict(res)
 
 
@@ -10464,161 +12719,179 @@ class OnlineTablesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_online_table_active(self,
-                                     name: str,
-                                     timeout=timedelta(minutes=20),
-                                     callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable:
+    def wait_get_online_table_active(
+        self,
+        name: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[OnlineTable], None]] = None,
+    ) -> OnlineTable:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ProvisioningInfoState.ACTIVE, )
-        failure_states = (ProvisioningInfoState.FAILED, )
-        status_message = 'polling...'
+        target_states = (ProvisioningInfoState.ACTIVE,)
+        failure_states = (ProvisioningInfoState.FAILED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(name=name)
             status = poll.unity_catalog_provisioning_state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                msg = f"failed to reach ACTIVE, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
         """Create an Online Table.
-        
+
         Create a new Online Table.
-        
+
         :param table: :class:`OnlineTable` (optional)
           Online Table information.
-        
+
         :returns:
           Long-running operation waiter for :class:`OnlineTable`.
           See :method:wait_get_online_table_active for more details.
         """
         body = table.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
-        return Wait(self.wait_get_online_table_active,
-                    response=OnlineTable.from_dict(op_response),
-                    name=op_response['name'])
-
-    def create_and_wait(self,
-                        *,
-                        table: Optional[OnlineTable] = None,
-                        timeout=timedelta(minutes=20)) -> OnlineTable:
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.0/online-tables", body=body, headers=headers)
+        return Wait(
+            self.wait_get_online_table_active,
+            response=OnlineTable.from_dict(op_response),
+            name=op_response["name"],
+        )
+
+    def create_and_wait(
+        self,
+        *,
+        table: Optional[OnlineTable] = None,
+        timeout=timedelta(minutes=20),
+    ) -> OnlineTable:
         return self.create(table=table).result(timeout=timeout)
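
A hedged sketch of the blocking helper above, which submits the create call and then waits (up to the timeout) for the table to reach ACTIVE. The names, key column, and the `OnlineTableSpec` field values are placeholders and may need adjusting to the installed SDK version.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()
table = w.online_tables.create_and_wait(
    table=catalog.OnlineTable(
        name="main.default.my_table_online",
        spec=catalog.OnlineTableSpec(
            source_table_full_name="main.default.my_table",
            primary_key_columns=["id"],
            # A scheduling policy (triggered or continuous) is normally set here as well.
            run_triggered=catalog.OnlineTableSpecTriggeredSchedulingPolicy(),
        ),
    )
)
print(table.status)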
 
     def delete(self, name: str):
         """Delete an Online Table.
-        
+
         Delete an online table. Warning: This will delete all the data in the online table. If the source
         Delta table was deleted or modified since this Online Table was created, this will lose the data
         forever!
-        
+
         :param name: str
           Full three-part (catalog, schema, table) name of the table.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/online-tables/{name}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/online-tables/{name}", headers=headers)
 
     def get(self, name: str) -> OnlineTable:
         """Get an Online Table.
-        
+
         Get information about an existing online table and its status.
-        
+
         :param name: str
           Full three-part (catalog, schema, table) name of the table.
-        
+
         :returns: :class:`OnlineTable`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/online-tables/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/online-tables/{name}", headers=headers)
         return OnlineTable.from_dict(res)
 
 
 class QualityMonitorsAPI:
     """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics
     tables and a dashboard that you can use to monitor table health and set alerts.
-    
+
     Most write operations require the user to be the owner of the table (or its parent schema or parent
     catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have
-    **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**)."""
+    **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def cancel_refresh(self, table_name: str, refresh_id: str):
         """Cancel refresh.
-        
+
         Cancel an active monitor refresh for the given refresh ID.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
         owner of the table
-        
+
         Additionally, the call must be made from the workspace where the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
         :param refresh_id: str
           ID of the refresh.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('POST',
-                     f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel',
-                     headers=headers)
-
-    def create(self,
-               table_name: str,
-               assets_dir: str,
-               output_schema_name: str,
-               *,
-               baseline_table_name: Optional[str] = None,
-               custom_metrics: Optional[List[MonitorMetric]] = None,
-               data_classification_config: Optional[MonitorDataClassificationConfig] = None,
-               inference_log: Optional[MonitorInferenceLog] = None,
-               notifications: Optional[MonitorNotifications] = None,
-               schedule: Optional[MonitorCronSchedule] = None,
-               skip_builtin_dashboard: Optional[bool] = None,
-               slicing_exprs: Optional[List[str]] = None,
-               snapshot: Optional[MonitorSnapshot] = None,
-               time_series: Optional[MonitorTimeSeries] = None,
-               warehouse_id: Optional[str] = None) -> MonitorInfo:
+        self._api.do(
+            "POST",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel",
+            headers=headers,
+        )
+
+    def create(
+        self,
+        table_name: str,
+        assets_dir: str,
+        output_schema_name: str,
+        *,
+        baseline_table_name: Optional[str] = None,
+        custom_metrics: Optional[List[MonitorMetric]] = None,
+        data_classification_config: Optional[MonitorDataClassificationConfig] = None,
+        inference_log: Optional[MonitorInferenceLog] = None,
+        notifications: Optional[MonitorNotifications] = None,
+        schedule: Optional[MonitorCronSchedule] = None,
+        skip_builtin_dashboard: Optional[bool] = None,
+        slicing_exprs: Optional[List[str]] = None,
+        snapshot: Optional[MonitorSnapshot] = None,
+        time_series: Optional[MonitorTimeSeries] = None,
+        warehouse_id: Optional[str] = None,
+    ) -> MonitorInfo:
         """Create a table monitor.
-        
+
         Creates a new monitor for the specified table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the
         table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's
         parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3.
         have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
         the table's parent schema - be an owner of the table.
-        
+
         Workspace assets, such as the dashboard, will be created in the workspace where this call was made.
-        
+
         :param table_name: str
           Full name of the table.
         :param assets_dir: str
@@ -10652,222 +12925,268 @@ def create(self,
         :param warehouse_id: str (optional)
           Optional argument to specify the warehouse for dashboard creation. If not specified, the first
           running warehouse will be used.
-        
+
         :returns: :class:`MonitorInfo`
         """
         body = {}
-        if assets_dir is not None: body['assets_dir'] = assets_dir
-        if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name
-        if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics]
+        if assets_dir is not None:
+            body["assets_dir"] = assets_dir
+        if baseline_table_name is not None:
+            body["baseline_table_name"] = baseline_table_name
+        if custom_metrics is not None:
+            body["custom_metrics"] = [v.as_dict() for v in custom_metrics]
         if data_classification_config is not None:
-            body['data_classification_config'] = data_classification_config.as_dict()
-        if inference_log is not None: body['inference_log'] = inference_log.as_dict()
-        if notifications is not None: body['notifications'] = notifications.as_dict()
-        if output_schema_name is not None: body['output_schema_name'] = output_schema_name
-        if schedule is not None: body['schedule'] = schedule.as_dict()
-        if skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = skip_builtin_dashboard
-        if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
-        if time_series is not None: body['time_series'] = time_series.as_dict()
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.1/unity-catalog/tables/{table_name}/monitor',
-                           body=body,
-                           headers=headers)
+            body["data_classification_config"] = data_classification_config.as_dict()
+        if inference_log is not None:
+            body["inference_log"] = inference_log.as_dict()
+        if notifications is not None:
+            body["notifications"] = notifications.as_dict()
+        if output_schema_name is not None:
+            body["output_schema_name"] = output_schema_name
+        if schedule is not None:
+            body["schedule"] = schedule.as_dict()
+        if skip_builtin_dashboard is not None:
+            body["skip_builtin_dashboard"] = skip_builtin_dashboard
+        if slicing_exprs is not None:
+            body["slicing_exprs"] = [v for v in slicing_exprs]
+        if snapshot is not None:
+            body["snapshot"] = snapshot.as_dict()
+        if time_series is not None:
+            body["time_series"] = time_series.as_dict()
+        if warehouse_id is not None:
+            body["warehouse_id"] = warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor",
+            body=body,
+            headers=headers,
+        )
         return MonitorInfo.from_dict(res)
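A rough sketch of the call above, assuming the API is reachable as `w.quality_monitors`; the table, assets directory, and output schema names are placeholders, and the snapshot profile is simply the least configuration needed:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import MonitorSnapshot

w = WorkspaceClient()

monitor = w.quality_monitors.create(
    table_name="main.default.orders",                         # table to monitor (placeholder)
    assets_dir="/Workspace/Users/me@example.com/monitoring",  # where the dashboard assets land
    output_schema_name="main.monitoring",                     # schema that receives the metric tables
    snapshot=MonitorSnapshot(),                               # snapshot profile: no extra configuration
)
print(monitor.dashboard_id)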
 
     def delete(self, table_name: str):
         """Delete a table monitor.
-        
+
         Deletes a monitor for the specified table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
         owner of the table.
-        
+
         Additionally, the call must be made from the workspace where the monitor was created.
-        
+
         Note that the metric tables and dashboard will not be deleted as part of this call; those assets must
         be manually cleaned up (if desired).
-        
+
         :param table_name: str
           Full name of the table.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/tables/{table_name}/monitor', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor",
+            headers=headers,
+        )
 
     def get(self, table_name: str) -> MonitorInfo:
         """Get a table monitor.
-        
+
         Gets a monitor for the specified table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema. 3. have the following
         permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent
         schema - **SELECT** privilege on the table.
-        
+
         The returned information includes configuration values, as well as information on assets created by
         the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different
         workspace than where the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
-        
+
         :returns: :class:`MonitorInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{table_name}/monitor', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor",
+            headers=headers,
+        )
         return MonitorInfo.from_dict(res)
 
     def get_refresh(self, table_name: str, refresh_id: str) -> MonitorRefreshInfo:
         """Get refresh.
-        
+
         Gets info about a specific monitor refresh using the given refresh ID.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
         **SELECT** privilege on the table.
-        
+
         Additionally, the call must be made from the workspace where the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
         :param refresh_id: str
           ID of the refresh.
-        
+
         :returns: :class:`MonitorRefreshInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}",
+            headers=headers,
+        )
         return MonitorRefreshInfo.from_dict(res)
 
     def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse:
         """List refreshes.
-        
+
         Gets an array containing the history of the most recent refreshes (up to 25) for this table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
         **SELECT** privilege on the table.
-        
+
         Additionally, the call must be made from the workspace where the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
-        
+
         :returns: :class:`MonitorRefreshListResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes",
+            headers=headers,
+        )
         return MonitorRefreshListResponse.from_dict(res)
 
-    def regenerate_dashboard(self,
-                             table_name: str,
-                             *,
-                             warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse:
+    def regenerate_dashboard(
+        self, table_name: str, *, warehouse_id: Optional[str] = None
+    ) -> RegenerateDashboardResponse:
         """Regenerate a monitoring dashboard.
-        
+
         Regenerates the monitoring dashboard for the specified table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
         owner of the table
-        
+
         The call must be made from the workspace where the monitor was created. The dashboard will be
         regenerated in the assets directory that was specified when the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
         :param warehouse_id: str (optional)
           Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
           running warehouse will be used.
-        
+
         :returns: :class:`RegenerateDashboardResponse`
         """
         body = {}
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if warehouse_id is not None:
+            body["warehouse_id"] = warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard",
+            body=body,
+            headers=headers,
+        )
         return RegenerateDashboardResponse.from_dict(res)
 
     def run_refresh(self, table_name: str) -> MonitorRefreshInfo:
         """Queue a metric refresh for a monitor.
-        
+
         Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
         background.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
         owner of the table
-        
+
         Additionally, the call must be made from the workspace where the monitor was created.
-        
+
         :param table_name: str
           Full name of the table.
-        
+
         :returns: :class:`MonitorRefreshInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes',
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes",
+            headers=headers,
+        )
         return MonitorRefreshInfo.from_dict(res)
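A sketch tying run_refresh, get_refresh, and list_refreshes together. It assumes `w.quality_monitors` and a placeholder table name; the refresh_id, state, and refreshes field names follow the generated dataclasses, and the refresh ID is passed back as a string to match the signature above:

import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
table = "main.default.orders"  # placeholder

refresh = w.quality_monitors.run_refresh(table_name=table)

# The refresh runs in the background; check back on it with its ID.
time.sleep(60)
latest = w.quality_monitors.get_refresh(table_name=table, refresh_id=str(refresh.refresh_id))
print(latest.state)

# History of the most recent refreshes (up to 25) for this table.
for r in w.quality_monitors.list_refreshes(table_name=table).refreshes or []:
    print(r.refresh_id, r.state)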
 
-    def update(self,
-               table_name: str,
-               output_schema_name: str,
-               *,
-               baseline_table_name: Optional[str] = None,
-               custom_metrics: Optional[List[MonitorMetric]] = None,
-               dashboard_id: Optional[str] = None,
-               data_classification_config: Optional[MonitorDataClassificationConfig] = None,
-               inference_log: Optional[MonitorInferenceLog] = None,
-               notifications: Optional[MonitorNotifications] = None,
-               schedule: Optional[MonitorCronSchedule] = None,
-               slicing_exprs: Optional[List[str]] = None,
-               snapshot: Optional[MonitorSnapshot] = None,
-               time_series: Optional[MonitorTimeSeries] = None) -> MonitorInfo:
+    def update(
+        self,
+        table_name: str,
+        output_schema_name: str,
+        *,
+        baseline_table_name: Optional[str] = None,
+        custom_metrics: Optional[List[MonitorMetric]] = None,
+        dashboard_id: Optional[str] = None,
+        data_classification_config: Optional[MonitorDataClassificationConfig] = None,
+        inference_log: Optional[MonitorInferenceLog] = None,
+        notifications: Optional[MonitorNotifications] = None,
+        schedule: Optional[MonitorCronSchedule] = None,
+        slicing_exprs: Optional[List[str]] = None,
+        snapshot: Optional[MonitorSnapshot] = None,
+        time_series: Optional[MonitorTimeSeries] = None,
+    ) -> MonitorInfo:
         """Update a table monitor.
-        
+
         Updates a monitor for the specified table.
-        
+
         The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
         table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
         - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
         owner of the table.
-        
+
         Additionally, the call must be made from the workspace where the monitor was created, and the caller
         must be the original creator of the monitor.
-        
+
         Certain configuration fields, such as output asset identifiers, cannot be updated.
-        
+
         :param table_name: str
           Full name of the table.
         :param output_schema_name: str
@@ -10897,28 +13216,43 @@ def update(self,
           Configuration for monitoring snapshot tables.
         :param time_series: :class:`MonitorTimeSeries` (optional)
           Configuration for monitoring time series tables.
-        
+
         :returns: :class:`MonitorInfo`
         """
         body = {}
-        if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name
-        if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics]
-        if dashboard_id is not None: body['dashboard_id'] = dashboard_id
+        if baseline_table_name is not None:
+            body["baseline_table_name"] = baseline_table_name
+        if custom_metrics is not None:
+            body["custom_metrics"] = [v.as_dict() for v in custom_metrics]
+        if dashboard_id is not None:
+            body["dashboard_id"] = dashboard_id
         if data_classification_config is not None:
-            body['data_classification_config'] = data_classification_config.as_dict()
-        if inference_log is not None: body['inference_log'] = inference_log.as_dict()
-        if notifications is not None: body['notifications'] = notifications.as_dict()
-        if output_schema_name is not None: body['output_schema_name'] = output_schema_name
-        if schedule is not None: body['schedule'] = schedule.as_dict()
-        if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs]
-        if snapshot is not None: body['snapshot'] = snapshot.as_dict()
-        if time_series is not None: body['time_series'] = time_series.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.1/unity-catalog/tables/{table_name}/monitor',
-                           body=body,
-                           headers=headers)
+            body["data_classification_config"] = data_classification_config.as_dict()
+        if inference_log is not None:
+            body["inference_log"] = inference_log.as_dict()
+        if notifications is not None:
+            body["notifications"] = notifications.as_dict()
+        if output_schema_name is not None:
+            body["output_schema_name"] = output_schema_name
+        if schedule is not None:
+            body["schedule"] = schedule.as_dict()
+        if slicing_exprs is not None:
+            body["slicing_exprs"] = [v for v in slicing_exprs]
+        if snapshot is not None:
+            body["snapshot"] = snapshot.as_dict()
+        if time_series is not None:
+            body["time_series"] = time_series.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.1/unity-catalog/tables/{table_name}/monitor",
+            body=body,
+            headers=headers,
+        )
         return MonitorInfo.from_dict(res)
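For completeness, a minimal update sketch that only changes the slicing expressions; output_schema_name is required by the signature on every update, and the names are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

w.quality_monitors.update(
    table_name="main.default.orders",
    output_schema_name="main.monitoring",
    slicing_exprs=["region", "channel"],  # recompute metrics sliced by these expressions
)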
 
 
@@ -10926,17 +13260,17 @@ class RegisteredModelsAPI:
     """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
     provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
     workspaces.
-    
+
     An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace.
     Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating
     new model versions currently requires use of the MLflow Python client. Once model versions are created,
     you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time
     serving using Databricks Model Serving.
-    
+
     All operations on registered models and model versions require USE_CATALOG permissions on the enclosing
     catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional
     privileges are required for various operations:
-    
+
     * To create a registered model, users must additionally have the CREATE_MODEL permission on the target
     schema. * To view registered model or model version metadata, model version data files, or invoke a model
     version, users must additionally have the EXECUTE permission on the registered model * To update
@@ -10944,32 +13278,34 @@ class RegisteredModelsAPI:
     registered model * To update other registered model or model version metadata (comments, aliases), create a
     new model version, or update permissions on the registered model, users must be owners of the registered
     model.
-    
+
     Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that
     specify a securable type, use "FUNCTION" as the securable type."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               catalog_name: str,
-               schema_name: str,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               storage_location: Optional[str] = None) -> RegisteredModelInfo:
+    def create(
+        self,
+        catalog_name: str,
+        schema_name: str,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        storage_location: Optional[str] = None,
+    ) -> RegisteredModelInfo:
         """Create a Registered Model.
-        
+
         Creates a new registered model in Unity Catalog.
-        
+
         File storage for model versions in the registered model will be located in the default location which
         is specified by the parent schema, or the parent catalog, or the Metastore.
-        
+
         For registered model creation to succeed, the user must satisfy the following conditions: - The caller
         must be a metastore admin, or be the owner of the parent catalog and schema, or have the
         **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
         - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema.
-        
+
         :param catalog_name: str
           The name of the catalog where the schema and the registered model reside
         :param schema_name: str
@@ -10980,73 +13316,91 @@ def create(self,
           The comment attached to the registered model
         :param storage_location: str (optional)
           The storage location on the cloud under which model version data files are stored
-        
+
         :returns: :class:`RegisteredModelInfo`
         """
         body = {}
-        if catalog_name is not None: body['catalog_name'] = catalog_name
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if schema_name is not None: body['schema_name'] = schema_name
-        if storage_location is not None: body['storage_location'] = storage_location
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/models', body=body, headers=headers)
+        if catalog_name is not None:
+            body["catalog_name"] = catalog_name
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if schema_name is not None:
+            body["schema_name"] = schema_name
+        if storage_location is not None:
+            body["storage_location"] = storage_location
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.1/unity-catalog/models", body=body, headers=headers)
         return RegisteredModelInfo.from_dict(res)
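A minimal sketch of registering a model in Unity Catalog via the call above (assumes `w.registered_models`; the catalog, schema, and model names are placeholders):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

model = w.registered_models.create(
    catalog_name="main",
    schema_name="default",
    name="churn_model",
    comment="Created via the Python SDK",
)
print(model.full_name)  # e.g. main.default.churn_model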
 
     def delete(self, full_name: str):
         """Delete a Registered Model.
-        
+
         Deletes a registered model and all its model versions from the specified parent catalog and schema.
-        
+
         The caller must be a metastore admin or an owner of the registered model. For the latter case, the
         caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the registered model
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/models/{full_name}",
+            headers=headers,
+        )
 
     def delete_alias(self, full_name: str, alias: str):
         """Delete a Registered Model Alias.
-        
+
         Deletes a registered model alias.
-        
+
         The caller must be a metastore admin or an owner of the registered model. For the latter case, the
         caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the registered model
         :param alias: str
           The name of the alias
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', headers=headers)
-
-    def get(self,
-            full_name: str,
-            *,
-            include_aliases: Optional[bool] = None,
-            include_browse: Optional[bool] = None) -> RegisteredModelInfo:
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}",
+            headers=headers,
+        )
+
+    def get(
+        self,
+        full_name: str,
+        *,
+        include_aliases: Optional[bool] = None,
+        include_browse: Optional[bool] = None,
+    ) -> RegisteredModelInfo:
         """Get a Registered Model.
-        
+
         Get a registered model.
-        
+
         The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
         registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
         privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the registered model
         :param include_aliases: bool (optional)
@@ -11054,38 +13408,49 @@ def get(self,
         :param include_browse: bool (optional)
           Whether to include registered models in the response for which the principal can only access
           selective metadata for
-        
+
         :returns: :class:`RegisteredModelInfo`
         """
 
         query = {}
-        if include_aliases is not None: query['include_aliases'] = include_aliases
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_aliases is not None:
+            query["include_aliases"] = include_aliases
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/models/{full_name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/models/{full_name}",
+            query=query,
+            headers=headers,
+        )
         return RegisteredModelInfo.from_dict(res)
 
-    def list(self,
-             *,
-             catalog_name: Optional[str] = None,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None,
-             schema_name: Optional[str] = None) -> Iterator[RegisteredModelInfo]:
+    def list(
+        self,
+        *,
+        catalog_name: Optional[str] = None,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        schema_name: Optional[str] = None,
+    ) -> Iterator[RegisteredModelInfo]:
         """List Registered Models.
-        
+
         List registered models. You can list registered models under a particular schema, or list all
         registered models in the current metastore.
-        
+
         The returned models are filtered based on the privileges of the calling user. For example, the
         metastore admin is able to list all the registered models. A regular user needs to be the owner or
         have the **EXECUTE** privilege on the registered model to receive the registered models in the
         response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
         on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         There is no guarantee of a specific ordering of the elements in the response.
-        
+
         :param catalog_name: str (optional)
           The identifier of the catalog under which to list registered models. If specified, schema_name must
           be specified.
@@ -11094,13 +13459,13 @@ def list(self,
           selective metadata for
         :param max_results: int (optional)
           Max number of registered models to return.
-          
+
           If both catalog and schema are specified: - when max_results is not specified, the page length is
           set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the
           page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); -
           when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when
           set to a value less than 0, an invalid parameter error is returned;
-          
+
           If neither schema nor catalog is specified: - when max_results is not specified, the page length is
           set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the
           page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); -
@@ -11111,71 +13476,91 @@ def list(self,
         :param schema_name: str (optional)
           The identifier of the schema under which to list registered models. If specified, catalog_name must
           be specified.
-        
+
         :returns: Iterator over :class:`RegisteredModelInfo`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if schema_name is not None: query['schema_name'] = schema_name
-        headers = {'Accept': 'application/json', }
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if schema_name is not None:
+            query["schema_name"] = schema_name
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/models', query=query, headers=headers)
-            if 'registered_models' in json:
-                for v in json['registered_models']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/models",
+                query=query,
+                headers=headers,
+            )
+            if "registered_models" in json:
+                for v in json["registered_models"]:
                     yield RegisteredModelInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredModelAlias:
         """Set a Registered Model Alias.
-        
+
         Set an alias on the specified registered model.
-        
+
         The caller must be a metastore admin or an owner of the registered model. For the latter case, the
         caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           Full name of the registered model
         :param alias: str
           The name of the alias
         :param version_num: int
           The version number of the model version to which the alias points
-        
+
         :returns: :class:`RegisteredModelAlias`
         """
         body = {}
-        if version_num is not None: body['version_num'] = version_num
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if version_num is not None:
+            body["version_num"] = version_num
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}",
+            body=body,
+            headers=headers,
+        )
         return RegisteredModelAlias.from_dict(res)
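A sketch of pointing an alias at a model version and then removing it again (names and the version number are placeholders; the response field names follow the generated RegisteredModelAlias dataclass):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

alias = w.registered_models.set_alias(
    full_name="main.default.churn_model",
    alias="champion",
    version_num=3,
)
print(alias.alias_name, alias.version_num)

w.registered_models.delete_alias(full_name="main.default.churn_model", alias="champion")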
 
-    def update(self,
-               full_name: str,
-               *,
-               comment: Optional[str] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None) -> RegisteredModelInfo:
+    def update(
+        self,
+        full_name: str,
+        *,
+        comment: Optional[str] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+    ) -> RegisteredModelInfo:
         """Update a Registered Model.
-        
+
         Updates the specified registered model.
-        
+
         The caller must be a metastore admin or an owner of the registered model. For the latter case, the
         caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         Currently only the name, the owner or the comment of the registered model can be updated.
-        
+
         :param full_name: str
           The three-level (fully qualified) name of the registered model
         :param comment: str (optional)
@@ -11184,16 +13569,27 @@ def update(self,
           New name for the registered model.
         :param owner: str (optional)
           The identifier of the user who owns the registered model
-        
+
         :returns: :class:`RegisteredModelInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if comment is not None:
+            body["comment"] = comment
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/models/{full_name}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/models/{full_name}",
+            body=body,
+            headers=headers,
+        )
         return RegisteredModelInfo.from_dict(res)
 
 
@@ -11202,71 +13598,87 @@ class ResourceQuotasAPI:
     can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
     metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
     limits. For more information on resource quotas see the [Unity Catalog documentation].
-    
-    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas"""
+
+    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def get_quota(self, parent_securable_type: str, parent_full_name: str,
-                  quota_name: str) -> GetQuotaResponse:
+    def get_quota(
+        self,
+        parent_securable_type: str,
+        parent_full_name: str,
+        quota_name: str,
+    ) -> GetQuotaResponse:
         """Get information for a single resource quota.
-        
+
         The GetQuota API returns usage information for a single resource quota, defined as a child-parent
         pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
         asynchronously. The updated count might not be returned in the first call.
-        
+
         :param parent_securable_type: str
           Securable type of the quota parent.
         :param parent_full_name: str
           Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
         :param quota_name: str
           Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
-        
+
         :returns: :class:`GetQuotaResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}',
-            headers=headers)
+            "GET",
+            f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}",
+            headers=headers,
+        )
         return GetQuotaResponse.from_dict(res)
 
-    def list_quotas(self,
-                    *,
-                    max_results: Optional[int] = None,
-                    page_token: Optional[str] = None) -> Iterator[QuotaInfo]:
+    def list_quotas(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[QuotaInfo]:
         """List all resource quotas under a metastore.
-        
+
         ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
         counts returned. This API does not trigger a refresh of quota counts.
-        
+
         :param max_results: int (optional)
           The number of quotas to return.
         :param page_token: str (optional)
           Opaque token for the next page of results.
-        
+
         :returns: Iterator over :class:`QuotaInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/unity-catalog/resource-quotas/all-resource-quotas',
-                                query=query,
-                                headers=headers)
-            if 'quotas' in json:
-                for v in json['quotas']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas",
+                query=query,
+                headers=headers,
+            )
+            if "quotas" in json:
+                for v in json["quotas"]:
                     yield QuotaInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
 
 class SchemasAPI:
@@ -11278,18 +13690,20 @@ class SchemasAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               catalog_name: str,
-               *,
-               comment: Optional[str] = None,
-               properties: Optional[Dict[str, str]] = None,
-               storage_root: Optional[str] = None) -> SchemaInfo:
+    def create(
+        self,
+        name: str,
+        catalog_name: str,
+        *,
+        comment: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        storage_root: Optional[str] = None,
+    ) -> SchemaInfo:
         """Create a schema.
-        
+
         Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the
         **CREATE_SCHEMA** privilege in the parent catalog.
-        
+
         :param name: str
           Name of schema, relative to parent catalog.
         :param catalog_name: str
@@ -11300,75 +13714,106 @@ def create(self,
           A map of key-value properties attached to the securable.
         :param storage_root: str (optional)
           Storage root URL for managed tables within schema.
-        
+
         :returns: :class:`SchemaInfo`
         """
         body = {}
-        if catalog_name is not None: body['catalog_name'] = catalog_name
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if properties is not None: body['properties'] = properties
-        if storage_root is not None: body['storage_root'] = storage_root
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if catalog_name is not None:
+            body["catalog_name"] = catalog_name
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if properties is not None:
+            body["properties"] = properties
+        if storage_root is not None:
+            body["storage_root"] = storage_root
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/schemas', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/schemas",
+            body=body,
+            headers=headers,
+        )
         return SchemaInfo.from_dict(res)
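A quick sketch of creating and then fetching a schema (assumes `w.schemas`; names are placeholders):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

schema = w.schemas.create(
    name="sdk_demo",
    catalog_name="main",
    comment="Created from the Python SDK",
)
fetched = w.schemas.get(full_name=schema.full_name)
print(fetched.full_name)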
 
     def delete(self, full_name: str, *, force: Optional[bool] = None):
         """Delete a schema.
-        
+
         Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
         owner of the parent catalog.
-        
+
         :param full_name: str
           Full name of the schema.
         :param force: bool (optional)
           Force deletion even if the schema is not empty.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', query=query, headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/schemas/{full_name}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo:
         """Get a schema.
-        
+
         Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the
         schema, or a user that has the **USE_SCHEMA** privilege on the schema.
-        
+
         :param full_name: str
           Full name of the schema.
         :param include_browse: bool (optional)
           Whether to include schemas in the response for which the principal can only access selective
           metadata for
-        
+
         :returns: :class:`SchemaInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/schemas/{full_name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/schemas/{full_name}",
+            query=query,
+            headers=headers,
+        )
         return SchemaInfo.from_dict(res)
 
-    def list(self,
-             catalog_name: str,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[SchemaInfo]:
+    def list(
+        self,
+        catalog_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[SchemaInfo]:
         """List schemas.
-        
+
         Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the
         owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas
         owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved.
         There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param catalog_name: str
           Parent catalog for schemas of interest.
         :param include_browse: bool (optional)
@@ -11381,42 +13826,56 @@ def list(self,
           (recommended); - when set to a value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`SchemaInfo`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/schemas', query=query, headers=headers)
-            if 'schemas' in json:
-                for v in json['schemas']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/schemas",
+                query=query,
+                headers=headers,
+            )
+            if "schemas" in json:
+                for v in json["schemas"]:
                     yield SchemaInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               full_name: str,
-               *,
-               comment: Optional[str] = None,
-               enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               properties: Optional[Dict[str, str]] = None) -> SchemaInfo:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        full_name: str,
+        *,
+        comment: Optional[str] = None,
+        enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+    ) -> SchemaInfo:
         """Update a schema.
-        
+
         Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If
         the caller is a metastore admin, only the __owner__ field can be changed in the update. If the
         __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA**
         privilege on the parent catalog.
-        
+
         :param full_name: str
           Full name of the schema.
         :param comment: str (optional)
@@ -11429,19 +13888,31 @@ def update(self,
           Username of current owner of schema.
         :param properties: Dict[str,str] (optional)
           A map of key-value properties attached to the securable.
-        
+
         :returns: :class:`SchemaInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
+        if comment is not None:
+            body["comment"] = comment
         if enable_predictive_optimization is not None:
-            body['enable_predictive_optimization'] = enable_predictive_optimization.value
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if properties is not None: body['properties'] = properties
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["enable_predictive_optimization"] = enable_predictive_optimization.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if properties is not None:
+            body["properties"] = properties
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/schemas/{full_name}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/schemas/{full_name}",
+            body=body,
+            headers=headers,
+        )
         return SchemaInfo.from_dict(res)
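And a matching sketch for list and update: the iterator hides the pagination handled above, and update changes fields in place (placeholder names again):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for s in w.schemas.list(catalog_name="main"):
    print(s.full_name)

updated = w.schemas.update(full_name="main.sdk_demo", comment="Updated from the Python SDK")
print(updated.comment)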
 
 
@@ -11451,30 +13922,33 @@ class StorageCredentialsAPI:
     control which users and groups can access the credential. If a user does not have access to a storage
     credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your
     cloud tenant on the user’s behalf.
-    
+
     Databricks recommends using external locations rather than using storage credentials directly.
-    
+
     To create storage credentials, you must be a Databricks account admin. The account admin who creates the
-    storage credential can delegate ownership to another user or group to manage permissions on it."""
+    storage credential can delegate ownership to another user or group to manage permissions on it.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               *,
-               aws_iam_role: Optional[AwsIamRoleRequest] = None,
-               azure_managed_identity: Optional[AzureManagedIdentityRequest] = None,
-               azure_service_principal: Optional[AzureServicePrincipal] = None,
-               cloudflare_api_token: Optional[CloudflareApiToken] = None,
-               comment: Optional[str] = None,
-               databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
-               read_only: Optional[bool] = None,
-               skip_validation: Optional[bool] = None) -> StorageCredentialInfo:
+    def create(
+        self,
+        name: str,
+        *,
+        aws_iam_role: Optional[AwsIamRoleRequest] = None,
+        azure_managed_identity: Optional[AzureManagedIdentityRequest] = None,
+        azure_service_principal: Optional[AzureServicePrincipal] = None,
+        cloudflare_api_token: Optional[CloudflareApiToken] = None,
+        comment: Optional[str] = None,
+        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ) -> StorageCredentialInfo:
         """Create a storage credential.
-        
+
         Creates a new storage credential.
-        
+
         :param name: str
           The credential name. The name must be unique within the metastore.
         :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
@@ -11493,78 +13967,105 @@ def create(self,
           Whether the storage credential is only usable for read operations.
         :param skip_validation: bool (optional)
           Supplying true to this argument skips validation of the created credential.
-        
+
         :returns: :class:`StorageCredentialInfo`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
         if azure_service_principal is not None:
-            body['azure_service_principal'] = azure_service_principal.as_dict()
-        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
-        if comment is not None: body['comment'] = comment
+            body["azure_service_principal"] = azure_service_principal.as_dict()
+        if cloudflare_api_token is not None:
+            body["cloudflare_api_token"] = cloudflare_api_token.as_dict()
+        if comment is not None:
+            body["comment"] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
-        if name is not None: body['name'] = name
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
+        if name is not None:
+            body["name"] = name
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/storage-credentials', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/storage-credentials",
+            body=body,
+            headers=headers,
+        )
         return StorageCredentialInfo.from_dict(res)
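
A minimal usage sketch for the create call above, assuming a configured WorkspaceClient exposing this service as storage_credentials (the SDK's usual attribute naming); the credential name and role ARN are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# Placeholder name and ARN; skip_validation is left at its default so the role is checked.
created = w.storage_credentials.create(
    name="my-credential",
    aws_iam_role=catalog.AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/my-uc-role"),
    comment="created from the SDK",
)
print(created.name)
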
 
     def delete(self, name: str, *, force: Optional[bool] = None):
         """Delete a credential.
-        
+
         Deletes a storage credential from the metastore. The caller must be an owner of the storage
         credential.
-        
+
         :param name: str
           Name of the storage credential.
         :param force: bool (optional)
           Force deletion even if there are dependent external locations or external tables.
-        
-        
+
+
         """
 
         query = {}
-        if force is not None: query['force'] = force
-        headers = {'Accept': 'application/json', }
+        if force is not None:
+            query["force"] = force
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/storage-credentials/{name}',
-                     query=query,
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/storage-credentials/{name}",
+            query=query,
+            headers=headers,
+        )
 
     def get(self, name: str) -> StorageCredentialInfo:
         """Get a credential.
-        
+
         Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
         storage credential, or have some permission on the storage credential.
-        
+
         :param name: str
           Name of the storage credential.
-        
+
         :returns: :class:`StorageCredentialInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/storage-credentials/{name}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/storage-credentials/{name}",
+            headers=headers,
+        )
         return StorageCredentialInfo.from_dict(res)
 
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[StorageCredentialInfo]:
+    def list(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[StorageCredentialInfo]:
         """List credentials.
-        
+
         Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
         only those storage credentials the caller has permission to access. If the caller is a metastore
         admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the
         elements in the array.
-        
+
         :param max_results: int (optional)
           Maximum number of storage credentials to return. If not set, all the storage credentials are
           returned (not recommended). - when set to a value greater than 0, the page length is the minimum of
@@ -11573,47 +14074,56 @@ def list(self,
           returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`StorageCredentialInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/unity-catalog/storage-credentials',
-                                query=query,
-                                headers=headers)
-            if 'storage_credentials' in json:
-                for v in json['storage_credentials']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/storage-credentials",
+                query=query,
+                headers=headers,
+            )
+            if "storage_credentials" in json:
+                for v in json["storage_credentials"]:
                     yield StorageCredentialInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               aws_iam_role: Optional[AwsIamRoleRequest] = None,
-               azure_managed_identity: Optional[AzureManagedIdentityResponse] = None,
-               azure_service_principal: Optional[AzureServicePrincipal] = None,
-               cloudflare_api_token: Optional[CloudflareApiToken] = None,
-               comment: Optional[str] = None,
-               databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
-               force: Optional[bool] = None,
-               isolation_mode: Optional[IsolationMode] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               read_only: Optional[bool] = None,
-               skip_validation: Optional[bool] = None) -> StorageCredentialInfo:
+            query["page_token"] = json["next_page_token"]
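
A short sketch of the pagination behaviour above: list() follows next_page_token itself, so callers simply iterate (assuming w.storage_credentials on a configured WorkspaceClient):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The iterator requests pages lazily; no manual page_token handling is needed.
for cred in w.storage_credentials.list():
    print(cred.name)
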
+
+    def update(
+        self,
+        name: str,
+        *,
+        aws_iam_role: Optional[AwsIamRoleRequest] = None,
+        azure_managed_identity: Optional[AzureManagedIdentityResponse] = None,
+        azure_service_principal: Optional[AzureServicePrincipal] = None,
+        cloudflare_api_token: Optional[CloudflareApiToken] = None,
+        comment: Optional[str] = None,
+        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
+        force: Optional[bool] = None,
+        isolation_mode: Optional[IsolationMode] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        read_only: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ) -> StorageCredentialInfo:
         """Update a credential.
-        
+
         Updates a storage credential on the metastore.
-        
+
         :param name: str
           Name of the storage credential.
         :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
@@ -11639,56 +14149,72 @@ def update(self,
           Whether the storage credential is only usable for read operations.
         :param skip_validation: bool (optional)
           Supplying true to this argument skips validation of the updated credential.
-        
+
         :returns: :class:`StorageCredentialInfo`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
         if azure_service_principal is not None:
-            body['azure_service_principal'] = azure_service_principal.as_dict()
-        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
-        if comment is not None: body['comment'] = comment
+            body["azure_service_principal"] = azure_service_principal.as_dict()
+        if cloudflare_api_token is not None:
+            body["cloudflare_api_token"] = cloudflare_api_token.as_dict()
+        if comment is not None:
+            body["comment"] = comment
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
-        if force is not None: body['force'] = force
-        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if read_only is not None: body['read_only'] = read_only
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/storage-credentials/{name}',
-                           body=body,
-                           headers=headers)
+            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
+        if force is not None:
+            body["force"] = force
+        if isolation_mode is not None:
+            body["isolation_mode"] = isolation_mode.value
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if read_only is not None:
+            body["read_only"] = read_only
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/storage-credentials/{name}",
+            body=body,
+            headers=headers,
+        )
         return StorageCredentialInfo.from_dict(res)
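
A hedged sketch of update, renaming a credential and transferring ownership; both values are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

updated = w.storage_credentials.update(
    name="my-credential",
    new_name="my-credential-renamed",
    owner="data-platform-admins",  # placeholder group name
)
print(updated.owner)
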
 
-    def validate(self,
-                 *,
-                 aws_iam_role: Optional[AwsIamRoleRequest] = None,
-                 azure_managed_identity: Optional[AzureManagedIdentityRequest] = None,
-                 azure_service_principal: Optional[AzureServicePrincipal] = None,
-                 cloudflare_api_token: Optional[CloudflareApiToken] = None,
-                 databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
-                 external_location_name: Optional[str] = None,
-                 read_only: Optional[bool] = None,
-                 storage_credential_name: Optional[str] = None,
-                 url: Optional[str] = None) -> ValidateStorageCredentialResponse:
+    def validate(
+        self,
+        *,
+        aws_iam_role: Optional[AwsIamRoleRequest] = None,
+        azure_managed_identity: Optional[AzureManagedIdentityRequest] = None,
+        azure_service_principal: Optional[AzureServicePrincipal] = None,
+        cloudflare_api_token: Optional[CloudflareApiToken] = None,
+        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None,
+        external_location_name: Optional[str] = None,
+        read_only: Optional[bool] = None,
+        storage_credential_name: Optional[str] = None,
+        url: Optional[str] = None,
+    ) -> ValidateStorageCredentialResponse:
         """Validate a storage credential.
-        
+
         Validates a storage credential. At least one of __external_location_name__ and __url__ needs to be
         provided. If only one of them is provided, it will be used for validation. And if both are provided,
         the __url__ will be used for validation, and __external_location_name__ will be ignored when checking
         overlapping urls.
-        
+
         Either the __storage_credential_name__ or the cloud-specific credential must be provided.
-        
+
         The caller must be a metastore admin or the storage credential owner or have the
         **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential.
-        
+
         :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
           The AWS IAM role configuration.
         :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
@@ -11707,88 +14233,110 @@ def validate(self,
           The name of the storage credential to validate.
         :param url: str (optional)
           The external location url to validate.
-        
+
         :returns: :class:`ValidateStorageCredentialResponse`
         """
         body = {}
-        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if aws_iam_role is not None:
+            body["aws_iam_role"] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
-            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+            body["azure_managed_identity"] = azure_managed_identity.as_dict()
         if azure_service_principal is not None:
-            body['azure_service_principal'] = azure_service_principal.as_dict()
-        if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict()
+            body["azure_service_principal"] = azure_service_principal.as_dict()
+        if cloudflare_api_token is not None:
+            body["cloudflare_api_token"] = cloudflare_api_token.as_dict()
         if databricks_gcp_service_account is not None:
-            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
-        if external_location_name is not None: body['external_location_name'] = external_location_name
-        if read_only is not None: body['read_only'] = read_only
-        if storage_credential_name is not None: body['storage_credential_name'] = storage_credential_name
-        if url is not None: body['url'] = url
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           '/api/2.1/unity-catalog/validate-storage-credentials',
-                           body=body,
-                           headers=headers)
+            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
+        if external_location_name is not None:
+            body["external_location_name"] = external_location_name
+        if read_only is not None:
+            body["read_only"] = read_only
+        if storage_credential_name is not None:
+            body["storage_credential_name"] = storage_credential_name
+        if url is not None:
+            body["url"] = url
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/validate-storage-credentials",
+            body=body,
+            headers=headers,
+        )
         return ValidateStorageCredentialResponse.from_dict(res)
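
A sketch of validate exercising the "existing credential plus URL" path described above; the credential name and bucket URL are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# When both a credential name and a URL are supplied, the URL is what gets validated.
result = w.storage_credentials.validate(
    storage_credential_name="my-credential",
    url="s3://my-bucket/some/prefix",
)
print(result)
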
 
 
 class SystemSchemasAPI:
     """A system schema is a schema that lives within the system catalog. A system schema may contain information
-    about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc."""
+    about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def disable(self, metastore_id: str, schema_name: str):
         """Disable a system schema.
-        
+
         Disables the system schema and removes it from the system catalog. The caller must be an account admin
         or a metastore admin.
-        
+
         :param metastore_id: str
           The metastore ID under which the system schema lives.
         :param schema_name: str
           Full name of the system schema.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}",
+            headers=headers,
+        )
 
     def enable(self, metastore_id: str, schema_name: str):
         """Enable a system schema.
-        
+
         Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
         metastore admin.
-        
+
         :param metastore_id: str
           The metastore ID under which the system schema lives.
         :param schema_name: str
           Full name of the system schema.
-        
-        
-        """
 
-        headers = {'Accept': 'application/json', }
 
-        self._api.do('PUT',
-                     f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}',
-                     headers=headers)
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
 
-    def list(self,
-             metastore_id: str,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]:
+        self._api.do(
+            "PUT",
+            f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}",
+            headers=headers,
+        )
+
+    def list(
+        self,
+        metastore_id: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[SystemSchemaInfo]:
         """List system schemas.
-        
+
         Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
         admin.
-        
+
         :param metastore_id: str
           The ID for the metastore in which the system schema resides.
         :param max_results: int (optional)
@@ -11798,38 +14346,45 @@ def list(self,
           is returned; - If not set, all the schemas are returned (not recommended).
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`SystemSchemaInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
-                                query=query,
-                                headers=headers)
-            if 'schemas' in json:
-                for v in json['schemas']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas",
+                query=query,
+                headers=headers,
+            )
+            if "schemas" in json:
+                for v in json["schemas"]:
                     yield SystemSchemaInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
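
A minimal sketch tying enable and list together; "billing" is an example schema name and metastores.current() is assumed to return the current metastore assignment:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

metastore_id = w.metastores.current().metastore_id
w.system_schemas.enable(metastore_id=metastore_id, schema_name="billing")

# List the system schemas and their enablement state for this metastore.
for schema in w.system_schemas.list(metastore_id=metastore_id):
    print(schema.schema, schema.state)
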
 
 
 class TableConstraintsAPI:
     """Primary key and foreign key constraints encode relationships between fields in tables.
-    
+
     Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a
     primary key in another table. This primary key is the parent constraint of the foreign key and the table
     this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child
     constraint of its referenced primary key; the table of the foreign key is the child table of the primary
     key.
-    
+
     You can declare primary keys and foreign keys as part of the table specification during table creation.
     You can also add or drop constraints on existing tables."""
 
@@ -11838,44 +14393,54 @@ def __init__(self, api_client):
 
     def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstraint:
         """Create a table constraint.
-        
+
         Creates a new table constraint.
-        
+
         For the table constraint creation to succeed, the user must satisfy both of these conditions: - the
         user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
         privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a
         __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent
         table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the
         owner of the referenced parent table.
-        
+
         :param full_name_arg: str
           The full name of the table referenced by the constraint.
         :param constraint: :class:`TableConstraint`
           A table constraint, as defined by *one* of the following fields being set:
           __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
-        
+
         :returns: :class:`TableConstraint`
         """
         body = {}
-        if constraint is not None: body['constraint'] = constraint.as_dict()
-        if full_name_arg is not None: body['full_name_arg'] = full_name_arg
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if constraint is not None:
+            body["constraint"] = constraint.as_dict()
+        if full_name_arg is not None:
+            body["full_name_arg"] = full_name_arg
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/constraints', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/constraints",
+            body=body,
+            headers=headers,
+        )
         return TableConstraint.from_dict(res)
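
A sketch of creating a primary-key constraint with the request shape above; the table and column names are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# Exactly one of the constraint variants is set, here the primary key.
constraint = w.table_constraints.create(
    full_name_arg="main.default.orders",
    constraint=catalog.TableConstraint(
        primary_key_constraint=catalog.PrimaryKeyConstraint(
            name="orders_pk",
            child_columns=["order_id"],
        )
    ),
)
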
 
     def delete(self, full_name: str, constraint_name: str, cascade: bool):
         """Delete a table constraint.
-        
+
         Deletes a table constraint.
-        
+
         For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the
         user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
         privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is
         **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG**
         privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner
         of the table.
-        
+
         :param full_name: str
           Full name of the table referenced by the constraint.
         :param constraint_name: str
@@ -11883,19 +14448,25 @@ def delete(self, full_name: str, constraint_name: str, cascade: bool):
         :param cascade: bool
           If true, try deleting all child constraints of the current constraint. If false, reject this
           operation if the current constraint has any child constraints.
-        
-        
+
+
         """
 
         query = {}
-        if cascade is not None: query['cascade'] = cascade
-        if constraint_name is not None: query['constraint_name'] = constraint_name
-        headers = {'Accept': 'application/json', }
+        if cascade is not None:
+            query["cascade"] = cascade
+        if constraint_name is not None:
+            query["constraint_name"] = constraint_name
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/unity-catalog/constraints/{full_name}',
-                     query=query,
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/constraints/{full_name}",
+            query=query,
+            headers=headers,
+        )
 
 
 class TablesAPI:
@@ -11904,7 +14475,7 @@ class TablesAPI:
     have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT
     permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the
     USE_SCHEMA permission on its parent schema.
-    
+
     A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table
     (rather than a managed or external table)."""
 
@@ -11913,57 +14484,71 @@ def __init__(self, api_client):
 
     def delete(self, full_name: str):
         """Delete a table.
-        
+
         Deletes a table from the specified parent catalog and schema. The caller must be the owner of the
         parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the
         parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent
         catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param full_name: str
           Full name of the table.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/tables/{full_name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/tables/{full_name}",
+            headers=headers,
+        )
 
     def exists(self, full_name: str) -> TableExistsResponse:
         """Get boolean reflecting if table exists.
-        
+
         Gets whether a table exists in the metastore for a specific catalog and schema. The caller must satisfy one
         of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the
         owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the
         **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema,
         and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on
         the parent catalog * Have BROWSE privilege on the parent schema.
-        
+
         :param full_name: str
           Full name of the table.
-        
+
         :returns: :class:`TableExistsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}/exists', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/tables/{full_name}/exists",
+            headers=headers,
+        )
         return TableExistsResponse.from_dict(res)
 
-    def get(self,
-            full_name: str,
-            *,
-            include_browse: Optional[bool] = None,
-            include_delta_metadata: Optional[bool] = None,
-            include_manifest_capabilities: Optional[bool] = None) -> TableInfo:
+    def get(
+        self,
+        full_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        include_delta_metadata: Optional[bool] = None,
+        include_manifest_capabilities: Optional[bool] = None,
+    ) -> TableInfo:
         """Get a table.
-        
+
         Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
         following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of
         the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG**
         privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be
         the table owner or have the SELECT privilege on the table.
-        
+
         :param full_name: str
           Full name of the table.
         :param include_browse: bool (optional)
@@ -11973,40 +14558,51 @@ def get(self,
           Whether delta metadata should be included in the response.
         :param include_manifest_capabilities: bool (optional)
           Whether to include a manifest containing capabilities the table has.
-        
+
         :returns: :class:`TableInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if include_delta_metadata is not None:
+            query["include_delta_metadata"] = include_delta_metadata
         if include_manifest_capabilities is not None:
-            query['include_manifest_capabilities'] = include_manifest_capabilities
-        headers = {'Accept': 'application/json', }
+            query["include_manifest_capabilities"] = include_manifest_capabilities
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/tables/{full_name}",
+            query=query,
+            headers=headers,
+        )
         return TableInfo.from_dict(res)
 
-    def list(self,
-             catalog_name: str,
-             schema_name: str,
-             *,
-             include_browse: Optional[bool] = None,
-             include_delta_metadata: Optional[bool] = None,
-             include_manifest_capabilities: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             omit_columns: Optional[bool] = None,
-             omit_properties: Optional[bool] = None,
-             omit_username: Optional[bool] = None,
-             page_token: Optional[str] = None) -> Iterator[TableInfo]:
+    def list(
+        self,
+        catalog_name: str,
+        schema_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        include_delta_metadata: Optional[bool] = None,
+        include_manifest_capabilities: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        omit_columns: Optional[bool] = None,
+        omit_properties: Optional[bool] = None,
+        omit_username: Optional[bool] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[TableInfo]:
         """List tables.
-        
+
         Gets an array of all tables for the current metastore under the parent catalog and schema. The caller
         must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the
         latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
         catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific
         ordering of the elements in the array.
-        
+
         :param catalog_name: str
           Name of parent catalog for tables of interest.
         :param schema_name: str
@@ -12032,55 +14628,74 @@ def list(self,
           not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).
-        
+
         :returns: Iterator over :class:`TableInfo`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
-        if include_browse is not None: query['include_browse'] = include_browse
-        if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if include_delta_metadata is not None:
+            query["include_delta_metadata"] = include_delta_metadata
         if include_manifest_capabilities is not None:
-            query['include_manifest_capabilities'] = include_manifest_capabilities
-        if max_results is not None: query['max_results'] = max_results
-        if omit_columns is not None: query['omit_columns'] = omit_columns
-        if omit_properties is not None: query['omit_properties'] = omit_properties
-        if omit_username is not None: query['omit_username'] = omit_username
-        if page_token is not None: query['page_token'] = page_token
-        if schema_name is not None: query['schema_name'] = schema_name
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+            query["include_manifest_capabilities"] = include_manifest_capabilities
+        if max_results is not None:
+            query["max_results"] = max_results
+        if omit_columns is not None:
+            query["omit_columns"] = omit_columns
+        if omit_properties is not None:
+            query["omit_properties"] = omit_properties
+        if omit_username is not None:
+            query["omit_username"] = omit_username
+        if page_token is not None:
+            query["page_token"] = page_token
+        if schema_name is not None:
+            query["schema_name"] = schema_name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query, headers=headers)
-            if 'tables' in json:
-                for v in json['tables']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/tables",
+                query=query,
+                headers=headers,
+            )
+            if "tables" in json:
+                for v in json["tables"]:
                     yield TableInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_summaries(self,
-                       catalog_name: str,
-                       *,
-                       include_manifest_capabilities: Optional[bool] = None,
-                       max_results: Optional[int] = None,
-                       page_token: Optional[str] = None,
-                       schema_name_pattern: Optional[str] = None,
-                       table_name_pattern: Optional[str] = None) -> Iterator[TableSummary]:
+            query["page_token"] = json["next_page_token"]
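
A sketch of listing tables in one schema while trimming the payload with the omit_* flags documented above; catalog and schema names are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for table in w.tables.list(catalog_name="main", schema_name="default", omit_columns=True):
    print(table.full_name, table.table_type)
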
+
+    def list_summaries(
+        self,
+        catalog_name: str,
+        *,
+        include_manifest_capabilities: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        schema_name_pattern: Optional[str] = None,
+        table_name_pattern: Optional[str] = None,
+    ) -> Iterator[TableSummary]:
         """List table summaries.
-        
+
         Gets an array of summaries for tables for a schema and catalog within the metastore. The table
         summaries returned are either:
-        
+
         * summaries for tables (within the current metastore and parent catalog and schema), when the user is
         a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent
         catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or
         **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the
         **USE_CATALOG** privilege on the parent catalog.
-        
+
         There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param catalog_name: str
           Name of parent catalog for tables of interest.
         :param include_manifest_capabilities: bool (optional)
@@ -12097,49 +14712,71 @@ def list_summaries(self,
           A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty.
         :param table_name_pattern: str (optional)
           A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty.
-        
+
         :returns: Iterator over :class:`TableSummary`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
         if include_manifest_capabilities is not None:
-            query['include_manifest_capabilities'] = include_manifest_capabilities
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern
-        if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+            query["include_manifest_capabilities"] = include_manifest_capabilities
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if schema_name_pattern is not None:
+            query["schema_name_pattern"] = schema_name_pattern
+        if table_name_pattern is not None:
+            query["table_name_pattern"] = table_name_pattern
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query, headers=headers)
-            if 'tables' in json:
-                for v in json['tables']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/table-summaries",
+                query=query,
+                headers=headers,
+            )
+            if "tables" in json:
+                for v in json["tables"]:
                     yield TableSummary.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
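
A sketch of list_summaries with a SQL LIKE pattern as described above; the catalog name and pattern are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# '%' matches any suffix in the LIKE pattern.
for summary in w.tables.list_summaries(catalog_name="main", table_name_pattern="sales_%"):
    print(summary.full_name, summary.table_type)
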
 
     def update(self, full_name: str, *, owner: Optional[str] = None):
         """Update a table owner.
-        
+
         Change the owner of the table. The caller must be the owner of the parent catalog, have the
         **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner
         of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
         privilege on the parent schema.
-        
+
         :param full_name: str
           Full name of the table.
         :param owner: str (optional)
-        
-        
+
+
         """
         body = {}
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH', f'/api/2.1/unity-catalog/tables/{full_name}', body=body, headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/tables/{full_name}",
+            body=body,
+            headers=headers,
+        )
 
 
 class TemporaryTableCredentialsAPI:
@@ -12160,34 +14797,42 @@ def __init__(self, api_client):
         self._api = api_client
 
     def generate_temporary_table_credentials(
-            self,
-            *,
-            operation: Optional[TableOperation] = None,
-            table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse:
+        self,
+        *,
+        operation: Optional[TableOperation] = None,
+        table_id: Optional[str] = None,
+    ) -> GenerateTemporaryTableCredentialResponse:
         """Generate a temporary table credential.
-        
+
         Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
         must have external_access_enabled flag set to true (default false). The caller must have
         EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
         owners.
-        
+
         :param operation: :class:`TableOperation` (optional)
           The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
           specified, the credentials returned will have write permissions, otherwise, it will be read only.
         :param table_id: str (optional)
           UUID of the table to read or write.
-        
+
         :returns: :class:`GenerateTemporaryTableCredentialResponse`
         """
         body = {}
-        if operation is not None: body['operation'] = operation.value
-        if table_id is not None: body['table_id'] = table_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if operation is not None:
+            body["operation"] = operation.value
+        if table_id is not None:
+            body["table_id"] = table_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           '/api/2.0/unity-catalog/temporary-table-credentials',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/unity-catalog/temporary-table-credentials",
+            body=body,
+            headers=headers,
+        )
         return GenerateTemporaryTableCredentialResponse.from_dict(res)
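
A hedged sketch of requesting a read-only temporary credential; the table UUID is a placeholder, and the call only succeeds when the metastore has external_access_enabled, as noted above:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

creds = w.temporary_table_credentials.generate_temporary_table_credentials(
    operation=catalog.TableOperation.READ,
    table_id="00000000-0000-0000-0000-000000000000",
)
print(creds)
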
 
 
@@ -12202,32 +14847,34 @@ class VolumesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               catalog_name: str,
-               schema_name: str,
-               name: str,
-               volume_type: VolumeType,
-               *,
-               comment: Optional[str] = None,
-               storage_location: Optional[str] = None) -> VolumeInfo:
+    def create(
+        self,
+        catalog_name: str,
+        schema_name: str,
+        name: str,
+        volume_type: VolumeType,
+        *,
+        comment: Optional[str] = None,
+        storage_location: Optional[str] = None,
+    ) -> VolumeInfo:
         """Create a Volume.
-        
+
         Creates a new volume.
-        
+
         The user could create either an external volume or a managed volume. An external volume will be
         created in the specified external location, while a managed volume will be located in the default
         location which is specified by the parent schema, or the parent catalog, or the Metastore.
-        
+
         For the volume creation to succeed, the user must satisfy the following conditions: - The caller must be a
         metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG**
         privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller
         must have **CREATE VOLUME** privilege on the parent schema.
-        
+
         For an external volume, the following conditions must also be satisfied: - The caller must have **CREATE
         EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes
         existing in the specified storage location. - The specified storage location is not under the location
         of other tables, nor volumes, or catalogs or schemas.
-        
+
         :param catalog_name: str
           The name of the catalog where the schema and the volume are
         :param schema_name: str
@@ -12239,59 +14886,75 @@ def create(self,
           The comment attached to the volume
         :param storage_location: str (optional)
           The storage location on the cloud
-        
+
         :returns: :class:`VolumeInfo`
         """
         body = {}
-        if catalog_name is not None: body['catalog_name'] = catalog_name
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if schema_name is not None: body['schema_name'] = schema_name
-        if storage_location is not None: body['storage_location'] = storage_location
-        if volume_type is not None: body['volume_type'] = volume_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if catalog_name is not None:
+            body["catalog_name"] = catalog_name
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if schema_name is not None:
+            body["schema_name"] = schema_name
+        if storage_location is not None:
+            body["storage_location"] = storage_location
+        if volume_type is not None:
+            body["volume_type"] = volume_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.1/unity-catalog/volumes', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/volumes",
+            body=body,
+            headers=headers,
+        )
         return VolumeInfo.from_dict(res)
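
A sketch of creating an external volume on an already-registered external location; catalog, schema, volume name, and storage path are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

volume = w.volumes.create(
    catalog_name="main",
    schema_name="default",
    name="landing_zone",
    volume_type=catalog.VolumeType.EXTERNAL,
    storage_location="s3://my-bucket/landing",
)
print(volume.full_name)
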
 
     def delete(self, name: str):
         """Delete a Volume.
-        
+
         Deletes a volume from the specified parent catalog and schema.
-        
+
         The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
         also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
         privilege on the parent schema.
-        
+
         :param name: str
           The three-level (fully qualified) name of the volume
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/volumes/{name}', headers=headers)
-
-    def list(self,
-             catalog_name: str,
-             schema_name: str,
-             *,
-             include_browse: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[VolumeInfo]:
+        self._api.do("DELETE", f"/api/2.1/unity-catalog/volumes/{name}", headers=headers)
+
+    def list(
+        self,
+        catalog_name: str,
+        schema_name: str,
+        *,
+        include_browse: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[VolumeInfo]:
         """List Volumes.
-        
+
         Gets an array of volumes for the current metastore under the parent catalog and schema.
-        
+
         The returned volumes are filtered based on the privileges of the calling user. For example, the
         metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
         **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case,
         the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
-        
+
         There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param catalog_name: str
           The identifier of the catalog
         :param schema_name: str
@@ -12301,81 +14964,103 @@ def list(self,
           metadata for
         :param max_results: int (optional)
           Maximum number of volumes to return (page length).
-          
+
           If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
           to a value greater than 0, the page length is the minimum of this value and a server configured
           value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
           (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
           error is returned;
-          
+
           Note: this parameter controls only the maximum number of volumes to return. The actual number of
           volumes returned in a page may be smaller than this value, including 0, even if there are more
           pages.
         :param page_token: str (optional)
           Opaque token returned by a previous request. It must be included in the request to retrieve the next
           page of results (pagination).
-        
+
         :returns: Iterator over :class:`VolumeInfo`
         """
 
         query = {}
-        if catalog_name is not None: query['catalog_name'] = catalog_name
-        if include_browse is not None: query['include_browse'] = include_browse
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if schema_name is not None: query['schema_name'] = schema_name
-        headers = {'Accept': 'application/json', }
+        if catalog_name is not None:
+            query["catalog_name"] = catalog_name
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if schema_name is not None:
+            query["schema_name"] = schema_name
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/volumes', query=query, headers=headers)
-            if 'volumes' in json:
-                for v in json['volumes']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/volumes",
+                query=query,
+                headers=headers,
+            )
+            if "volumes" in json:
+                for v in json["volumes"]:
                     yield VolumeInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
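
A short sketch of iterating volumes; pagination is handled by the iterator shown above, and the catalog and schema names are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for volume in w.volumes.list(catalog_name="main", schema_name="default"):
    print(volume.full_name)
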
 
     def read(self, name: str, *, include_browse: Optional[bool] = None) -> VolumeInfo:
         """Get a Volume.
-        
+
         Gets a volume from the metastore for a specific catalog and schema.
-        
+
         The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the
         volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
         on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
+
         :param name: str
           The three-level (fully qualified) name of the volume
         :param include_browse: bool (optional)
           Whether to include volumes in the response for which the principal can only access selective
           metadata for
-        
+
         :returns: :class:`VolumeInfo`
         """
 
         query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json', }
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/volumes/{name}', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/volumes/{name}",
+            query=query,
+            headers=headers,
+        )
         return VolumeInfo.from_dict(res)
 
-    def update(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None) -> VolumeInfo:
+    def update(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+    ) -> VolumeInfo:
         """Update a Volume.
-        
+
         Updates the specified volume under the specified parent catalog and schema.
-        
+
         The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
         also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
         privilege on the parent schema.
-        
+
         Currently only the name, the owner, or the comment of the volume can be updated.
-        
+
         :param name: str
           The three-level (fully qualified) name of the volume
         :param comment: str (optional)
@@ -12384,16 +15069,27 @@ def update(self,
           New name for the volume.
         :param owner: str (optional)
           The identifier of the user who owns the volume
-        
+
         :returns: :class:`VolumeInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if comment is not None:
+            body["comment"] = comment
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/volumes/{name}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/volumes/{name}",
+            body=body,
+            headers=headers,
+        )
         return VolumeInfo.from_dict(res)
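A sketch of the PATCH wrapper above; only comment, new_name and owner are accepted, and the names below are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    updated = w.volumes.update(
        name="main.default.landing",   # placeholder three-level volume name
        new_name="landing_zone",
        owner="data-platform-team",    # placeholder principal
    )
    print(updated.full_name)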
 
 
@@ -12401,51 +15097,58 @@ class WorkspaceBindingsAPI:
     """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be
     accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list
     of workspaces. This API allows you to configure (bind) securables to workspaces.
-    
+
     NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
     workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
-    
+
     A securable's workspace bindings can be configured by a metastore admin or the owner of the securable.
-    
+
     The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use
     the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
     ability to bind a securable in READ_ONLY mode (catalogs only).
-    
-    Securable types that support binding: - catalog - storage_credential - external_location"""
+
+    Securable types that support binding: - catalog - storage_credential - external_location
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get(self, name: str) -> CurrentWorkspaceBindings:
         """Get catalog workspace bindings.
-        
+
         Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
         catalog.
-        
+
         :param name: str
           The name of the catalog.
-        
+
         :returns: :class:`CurrentWorkspaceBindings`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}",
+            headers=headers,
+        )
         return CurrentWorkspaceBindings.from_dict(res)
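A sketch of the deprecated catalog-only getter, assuming the client exposes this API as workspace_bindings and that CurrentWorkspaceBindings carries a workspaces list (neither is confirmed by this hunk):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    current = w.workspace_bindings.get(name="sandbox")   # placeholder catalog name
    print(current.workspaces)  # assumed field: workspace IDs bound to the catalog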
 
-    def get_bindings(self,
-                     securable_type: GetBindingsSecurableType,
-                     securable_name: str,
-                     *,
-                     max_results: Optional[int] = None,
-                     page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]:
+    def get_bindings(
+        self,
+        securable_type: GetBindingsSecurableType,
+        securable_name: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[WorkspaceBinding]:
         """Get securable workspace bindings.
-        
+
         Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
-        
+
         :param securable_type: :class:`GetBindingsSecurableType`
           The type of the securable to bind to a workspace.
         :param securable_name: str
@@ -12457,68 +15160,85 @@ def get_bindings(self,
           error is returned; - If not set, all the workspace bindings are returned (not recommended).
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`WorkspaceBinding`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}',
-                                query=query,
-                                headers=headers)
-            if 'bindings' in json:
-                for v in json['bindings']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}",
+                query=query,
+                headers=headers,
+            )
+            if "bindings" in json:
+                for v in json["bindings"]:
                     yield WorkspaceBinding.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               assign_workspaces: Optional[List[int]] = None,
-               unassign_workspaces: Optional[List[int]] = None) -> CurrentWorkspaceBindings:
+            query["page_token"] = json["next_page_token"]
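get_bindings() follows the same page_token loop against the newer /bindings path. A sketch, assuming GetBindingsSecurableType.CATALOG is a valid member (the class docstring lists catalog, storage_credential and external_location) and that WorkspaceBinding exposes workspace_id and binding_type:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import GetBindingsSecurableType

    w = WorkspaceClient()
    for binding in w.workspace_bindings.get_bindings(
        securable_type=GetBindingsSecurableType.CATALOG,  # assumed enum member
        securable_name="sandbox",                         # placeholder catalog name
        max_results=50,
    ):
        print(binding.workspace_id, binding.binding_type)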
+
+    def update(
+        self,
+        name: str,
+        *,
+        assign_workspaces: Optional[List[int]] = None,
+        unassign_workspaces: Optional[List[int]] = None,
+    ) -> CurrentWorkspaceBindings:
         """Update catalog workspace bindings.
-        
+
         Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
         catalog.
-        
+
         :param name: str
           The name of the catalog.
         :param assign_workspaces: List[int] (optional)
           A list of workspace IDs.
         :param unassign_workspaces: List[int] (optional)
           A list of workspace IDs.
-        
+
         :returns: :class:`CurrentWorkspaceBindings`
         """
         body = {}
-        if assign_workspaces is not None: body['assign_workspaces'] = [v for v in assign_workspaces]
-        if unassign_workspaces is not None: body['unassign_workspaces'] = [v for v in unassign_workspaces]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if assign_workspaces is not None:
+            body["assign_workspaces"] = [v for v in assign_workspaces]
+        if unassign_workspaces is not None:
+            body["unassign_workspaces"] = [v for v in unassign_workspaces]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}",
+            body=body,
+            headers=headers,
+        )
         return CurrentWorkspaceBindings.from_dict(res)
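A sketch of the catalog-only update; the catalog name and workspace IDs are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    result = w.workspace_bindings.update(
        name="sandbox",                           # placeholder catalog name
        assign_workspaces=[1234567890123456],     # placeholder workspace IDs
        unassign_workspaces=[6543210987654321],
    )
    print(result.workspaces)  # assumed field on CurrentWorkspaceBindings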
 
-    def update_bindings(self,
-                        securable_type: UpdateBindingsSecurableType,
-                        securable_name: str,
-                        *,
-                        add: Optional[List[WorkspaceBinding]] = None,
-                        remove: Optional[List[WorkspaceBinding]] = None) -> WorkspaceBindingsResponse:
+    def update_bindings(
+        self,
+        securable_type: UpdateBindingsSecurableType,
+        securable_name: str,
+        *,
+        add: Optional[List[WorkspaceBinding]] = None,
+        remove: Optional[List[WorkspaceBinding]] = None,
+    ) -> WorkspaceBindingsResponse:
         """Update securable workspace bindings.
-        
+
         Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.
-        
+
         :param securable_type: :class:`UpdateBindingsSecurableType`
           The type of the securable to bind to a workspace.
         :param securable_name: str
@@ -12527,16 +15247,23 @@ def update_bindings(self,
           List of workspace bindings
         :param remove: List[:class:`WorkspaceBinding`] (optional)
           List of workspace bindings
-        
+
         :returns: :class:`WorkspaceBindingsResponse`
         """
         body = {}
-        if add is not None: body['add'] = [v.as_dict() for v in add]
-        if remove is not None: body['remove'] = [v.as_dict() for v in remove]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if add is not None:
+            body["add"] = [v.as_dict() for v in add]
+        if remove is not None:
+            body["remove"] = [v.as_dict() for v in remove]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}",
+            body=body,
+            headers=headers,
+        )
         return WorkspaceBindingsResponse.from_dict(res)
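update_bindings() serializes each WorkspaceBinding with as_dict(), as shown above. A sketch, assuming the UpdateBindingsSecurableType and WorkspaceBindingBindingType members named below exist; IDs and names are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import (
        UpdateBindingsSecurableType,
        WorkspaceBinding,
        WorkspaceBindingBindingType,
    )

    w = WorkspaceClient()
    resp = w.workspace_bindings.update_bindings(
        securable_type=UpdateBindingsSecurableType.CATALOG,
        securable_name="sandbox",
        add=[
            WorkspaceBinding(
                workspace_id=1234567890123456,
                binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,
            )
        ],
    )
    print(resp.bindings)  # assumed to list the resulting bindings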
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
index f7a213669..ab1974cb9 100755
--- a/databricks/sdk/service/cleanrooms.py
+++ b/databricks/sdk/service/cleanrooms.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import catalog, jobs, settings, sharing
 
@@ -57,54 +57,74 @@ class CleanRoom:
     def as_dict(self) -> dict:
         """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.access_restricted is not None:
+            body["access_restricted"] = self.access_restricted.value
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
         if self.local_collaborator_alias is not None:
-            body['local_collaborator_alias'] = self.local_collaborator_alias
-        if self.name is not None: body['name'] = self.name
-        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
-        if self.owner is not None: body['owner'] = self.owner
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+            body["local_collaborator_alias"] = self.local_collaborator_alias
+        if self.name is not None:
+            body["name"] = self.name
+        if self.output_catalog:
+            body["output_catalog"] = self.output_catalog.as_dict()
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.remote_detailed_info:
+            body["remote_detailed_info"] = self.remote_detailed_info.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoom into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.access_restricted is not None:
+            body["access_restricted"] = self.access_restricted
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
         if self.local_collaborator_alias is not None:
-            body['local_collaborator_alias'] = self.local_collaborator_alias
-        if self.name is not None: body['name'] = self.name
-        if self.output_catalog: body['output_catalog'] = self.output_catalog
-        if self.owner is not None: body['owner'] = self.owner
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info
-        if self.status is not None: body['status'] = self.status
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+            body["local_collaborator_alias"] = self.local_collaborator_alias
+        if self.name is not None:
+            body["name"] = self.name
+        if self.output_catalog:
+            body["output_catalog"] = self.output_catalog
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.remote_detailed_info:
+            body["remote_detailed_info"] = self.remote_detailed_info
+        if self.status is not None:
+            body["status"] = self.status
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoom:
         """Deserializes the CleanRoom from a dictionary."""
-        return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   local_collaborator_alias=d.get('local_collaborator_alias', None),
-                   name=d.get('name', None),
-                   output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog),
-                   owner=d.get('owner', None),
-                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail),
-                   status=_enum(d, 'status', CleanRoomStatusEnum),
-                   updated_at=d.get('updated_at', None))
+        return cls(
+            access_restricted=_enum(d, "access_restricted", CleanRoomAccessRestricted),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            local_collaborator_alias=d.get("local_collaborator_alias", None),
+            name=d.get("name", None),
+            output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog),
+            owner=d.get("owner", None),
+            remote_detailed_info=_from_dict(d, "remote_detailed_info", CleanRoomRemoteDetail),
+            status=_enum(d, "status", CleanRoomStatusEnum),
+            updated_at=d.get("updated_at", None),
+        )
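The as_dict/from_dict pair above is the serialization convention used by every dataclass in this module: only fields that are set end up in the request body, and from_dict routes nested objects and enums through _from_dict/_enum. A round-trip sketch, assuming the CleanRoom fields default to None:

    from databricks.sdk.service.cleanrooms import CleanRoom

    room = CleanRoom(name="demo_clean_room", comment="example only")
    body = room.as_dict()           # only the keys that were set appear in the body
    restored = CleanRoom.from_dict(body)
    assert restored.name == room.name
    assert restored.status is None  # unset enum fields are assumed to come back as None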
 
 
 class CleanRoomAccessRestricted(Enum):
 
-    CSP_MISMATCH = 'CSP_MISMATCH'
-    NO_RESTRICTION = 'NO_RESTRICTION'
+    CSP_MISMATCH = "CSP_MISMATCH"
+    NO_RESTRICTION = "NO_RESTRICTION"
 
 
 @dataclass
@@ -167,70 +187,96 @@ class CleanRoomAsset:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.asset_type is not None: body['asset_type'] = self.asset_type.value
-        if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict()
+        if self.added_at is not None:
+            body["added_at"] = self.added_at
+        if self.asset_type is not None:
+            body["asset_type"] = self.asset_type.value
+        if self.foreign_table:
+            body["foreign_table"] = self.foreign_table.as_dict()
         if self.foreign_table_local_details:
-            body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.notebook: body['notebook'] = self.notebook.as_dict()
+            body["foreign_table_local_details"] = self.foreign_table_local_details.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notebook:
+            body["notebook"] = self.notebook.as_dict()
         if self.owner_collaborator_alias is not None:
-            body['owner_collaborator_alias'] = self.owner_collaborator_alias
-        if self.status is not None: body['status'] = self.status.value
-        if self.table: body['table'] = self.table.as_dict()
-        if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict()
-        if self.view: body['view'] = self.view.as_dict()
-        if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict()
-        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict()
+            body["owner_collaborator_alias"] = self.owner_collaborator_alias
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.table:
+            body["table"] = self.table.as_dict()
+        if self.table_local_details:
+            body["table_local_details"] = self.table_local_details.as_dict()
+        if self.view:
+            body["view"] = self.view.as_dict()
+        if self.view_local_details:
+            body["view_local_details"] = self.view_local_details.as_dict()
+        if self.volume_local_details:
+            body["volume_local_details"] = self.volume_local_details.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.asset_type is not None: body['asset_type'] = self.asset_type
-        if self.foreign_table: body['foreign_table'] = self.foreign_table
+        if self.added_at is not None:
+            body["added_at"] = self.added_at
+        if self.asset_type is not None:
+            body["asset_type"] = self.asset_type
+        if self.foreign_table:
+            body["foreign_table"] = self.foreign_table
         if self.foreign_table_local_details:
-            body['foreign_table_local_details'] = self.foreign_table_local_details
-        if self.name is not None: body['name'] = self.name
-        if self.notebook: body['notebook'] = self.notebook
+            body["foreign_table_local_details"] = self.foreign_table_local_details
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notebook:
+            body["notebook"] = self.notebook
         if self.owner_collaborator_alias is not None:
-            body['owner_collaborator_alias'] = self.owner_collaborator_alias
-        if self.status is not None: body['status'] = self.status
-        if self.table: body['table'] = self.table
-        if self.table_local_details: body['table_local_details'] = self.table_local_details
-        if self.view: body['view'] = self.view
-        if self.view_local_details: body['view_local_details'] = self.view_local_details
-        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details
+            body["owner_collaborator_alias"] = self.owner_collaborator_alias
+        if self.status is not None:
+            body["status"] = self.status
+        if self.table:
+            body["table"] = self.table
+        if self.table_local_details:
+            body["table_local_details"] = self.table_local_details
+        if self.view:
+            body["view"] = self.view
+        if self.view_local_details:
+            body["view_local_details"] = self.view_local_details
+        if self.volume_local_details:
+            body["volume_local_details"] = self.volume_local_details
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAsset:
         """Deserializes the CleanRoomAsset from a dictionary."""
-        return cls(added_at=d.get('added_at', None),
-                   asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType),
-                   foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable),
-                   foreign_table_local_details=_from_dict(d, 'foreign_table_local_details',
-                                                          CleanRoomAssetForeignTableLocalDetails),
-                   name=d.get('name', None),
-                   notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook),
-                   owner_collaborator_alias=d.get('owner_collaborator_alias', None),
-                   status=_enum(d, 'status', CleanRoomAssetStatusEnum),
-                   table=_from_dict(d, 'table', CleanRoomAssetTable),
-                   table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails),
-                   view=_from_dict(d, 'view', CleanRoomAssetView),
-                   view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails),
-                   volume_local_details=_from_dict(d, 'volume_local_details',
-                                                   CleanRoomAssetVolumeLocalDetails))
+        return cls(
+            added_at=d.get("added_at", None),
+            asset_type=_enum(d, "asset_type", CleanRoomAssetAssetType),
+            foreign_table=_from_dict(d, "foreign_table", CleanRoomAssetForeignTable),
+            foreign_table_local_details=_from_dict(
+                d,
+                "foreign_table_local_details",
+                CleanRoomAssetForeignTableLocalDetails,
+            ),
+            name=d.get("name", None),
+            notebook=_from_dict(d, "notebook", CleanRoomAssetNotebook),
+            owner_collaborator_alias=d.get("owner_collaborator_alias", None),
+            status=_enum(d, "status", CleanRoomAssetStatusEnum),
+            table=_from_dict(d, "table", CleanRoomAssetTable),
+            table_local_details=_from_dict(d, "table_local_details", CleanRoomAssetTableLocalDetails),
+            view=_from_dict(d, "view", CleanRoomAssetView),
+            view_local_details=_from_dict(d, "view_local_details", CleanRoomAssetViewLocalDetails),
+            volume_local_details=_from_dict(d, "volume_local_details", CleanRoomAssetVolumeLocalDetails),
+        )
 
 
 class CleanRoomAssetAssetType(Enum):
 
-    FOREIGN_TABLE = 'FOREIGN_TABLE'
-    NOTEBOOK_FILE = 'NOTEBOOK_FILE'
-    TABLE = 'TABLE'
-    VIEW = 'VIEW'
-    VOLUME = 'VOLUME'
+    FOREIGN_TABLE = "FOREIGN_TABLE"
+    NOTEBOOK_FILE = "NOTEBOOK_FILE"
+    TABLE = "TABLE"
+    VIEW = "VIEW"
+    VOLUME = "VOLUME"
 
 
 @dataclass
@@ -241,19 +287,21 @@ class CleanRoomAssetForeignTable:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
+        if self.columns:
+            body["columns"] = self.columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTable:
         """Deserializes the CleanRoomAssetForeignTable from a dictionary."""
-        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+        return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo))
 
 
 @dataclass
@@ -265,19 +313,21 @@ class CleanRoomAssetForeignTableLocalDetails:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTableLocalDetails:
         """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None))
+        return cls(local_name=d.get("local_name", None))
 
 
 @dataclass
@@ -292,28 +342,35 @@ class CleanRoomAssetNotebook:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.notebook_content is not None:
+            body["notebook_content"] = self.notebook_content
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.notebook_content is not None:
+            body["notebook_content"] = self.notebook_content
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
         """Deserializes the CleanRoomAssetNotebook from a dictionary."""
-        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))
+        return cls(
+            etag=d.get("etag", None),
+            notebook_content=d.get("notebook_content", None),
+        )
 
 
 class CleanRoomAssetStatusEnum(Enum):
 
-    ACTIVE = 'ACTIVE'
-    PENDING = 'PENDING'
-    PERMISSION_DENIED = 'PERMISSION_DENIED'
+    ACTIVE = "ACTIVE"
+    PENDING = "PENDING"
+    PERMISSION_DENIED = "PERMISSION_DENIED"
 
 
 @dataclass
@@ -324,19 +381,21 @@ class CleanRoomAssetTable:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
+        if self.columns:
+            body["columns"] = self.columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTable:
         """Deserializes the CleanRoomAssetTable from a dictionary."""
-        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+        return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo))
 
 
 @dataclass
@@ -351,22 +410,28 @@ class CleanRoomAssetTableLocalDetails:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
-        if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions]
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
+        if self.partitions:
+            body["partitions"] = [v.as_dict() for v in self.partitions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
-        if self.partitions: body['partitions'] = self.partitions
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
+        if self.partitions:
+            body["partitions"] = self.partitions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTableLocalDetails:
         """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None),
-                   partitions=_repeated_dict(d, 'partitions', sharing.PartitionSpecificationPartition))
+        return cls(
+            local_name=d.get("local_name", None),
+            partitions=_repeated_dict(d, "partitions", sharing.PartitionSpecificationPartition),
+        )
 
 
 @dataclass
@@ -377,19 +442,21 @@ class CleanRoomAssetView:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
+        if self.columns:
+            body["columns"] = self.columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetView:
         """Deserializes the CleanRoomAssetView from a dictionary."""
-        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+        return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo))
 
 
 @dataclass
@@ -401,19 +468,21 @@ class CleanRoomAssetViewLocalDetails:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetViewLocalDetails:
         """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None))
+        return cls(local_name=d.get("local_name", None))
 
 
 @dataclass
@@ -425,19 +494,21 @@ class CleanRoomAssetVolumeLocalDetails:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetVolumeLocalDetails:
         """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None))
+        return cls(local_name=d.get("local_name", None))
 
 
 @dataclass
@@ -477,38 +548,48 @@ class CleanRoomCollaborator:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.collaborator_alias is not None:
+            body["collaborator_alias"] = self.collaborator_alias
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
         if self.invite_recipient_email is not None:
-            body['invite_recipient_email'] = self.invite_recipient_email
+            body["invite_recipient_email"] = self.invite_recipient_email
         if self.invite_recipient_workspace_id is not None:
-            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
-        if self.organization_name is not None: body['organization_name'] = self.organization_name
+            body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id
+        if self.organization_name is not None:
+            body["organization_name"] = self.organization_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.collaborator_alias is not None:
+            body["collaborator_alias"] = self.collaborator_alias
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.global_metastore_id is not None:
+            body["global_metastore_id"] = self.global_metastore_id
         if self.invite_recipient_email is not None:
-            body['invite_recipient_email'] = self.invite_recipient_email
+            body["invite_recipient_email"] = self.invite_recipient_email
         if self.invite_recipient_workspace_id is not None:
-            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
-        if self.organization_name is not None: body['organization_name'] = self.organization_name
+            body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id
+        if self.organization_name is not None:
+            body["organization_name"] = self.organization_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator:
         """Deserializes the CleanRoomCollaborator from a dictionary."""
-        return cls(collaborator_alias=d.get('collaborator_alias', None),
-                   display_name=d.get('display_name', None),
-                   global_metastore_id=d.get('global_metastore_id', None),
-                   invite_recipient_email=d.get('invite_recipient_email', None),
-                   invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None),
-                   organization_name=d.get('organization_name', None))
+        return cls(
+            collaborator_alias=d.get("collaborator_alias", None),
+            display_name=d.get("display_name", None),
+            global_metastore_id=d.get("global_metastore_id", None),
+            invite_recipient_email=d.get("invite_recipient_email", None),
+            invite_recipient_workspace_id=d.get("invite_recipient_workspace_id", None),
+            organization_name=d.get("organization_name", None),
+        )
 
 
 @dataclass
@@ -542,40 +623,52 @@ def as_dict(self) -> dict:
         """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.collaborator_job_run_info:
-            body['collaborator_job_run_info'] = self.collaborator_job_run_info.as_dict()
-        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict()
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+            body["collaborator_job_run_info"] = self.collaborator_job_run_info.as_dict()
+        if self.notebook_job_run_state:
+            body["notebook_job_run_state"] = self.notebook_job_run_state.as_dict()
+        if self.notebook_name is not None:
+            body["notebook_name"] = self.notebook_name
         if self.output_schema_expiration_time is not None:
-            body['output_schema_expiration_time'] = self.output_schema_expiration_time
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
+            body["output_schema_expiration_time"] = self.output_schema_expiration_time
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info
-        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.collaborator_job_run_info:
+            body["collaborator_job_run_info"] = self.collaborator_job_run_info
+        if self.notebook_job_run_state:
+            body["notebook_job_run_state"] = self.notebook_job_run_state
+        if self.notebook_name is not None:
+            body["notebook_name"] = self.notebook_name
         if self.output_schema_expiration_time is not None:
-            body['output_schema_expiration_time'] = self.output_schema_expiration_time
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
+            body["output_schema_expiration_time"] = self.output_schema_expiration_time
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookTaskRun:
         """Deserializes the CleanRoomNotebookTaskRun from a dictionary."""
-        return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info',
-                                                        CollaboratorJobRunInfo),
-                   notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState),
-                   notebook_name=d.get('notebook_name', None),
-                   output_schema_expiration_time=d.get('output_schema_expiration_time', None),
-                   output_schema_name=d.get('output_schema_name', None),
-                   run_duration=d.get('run_duration', None),
-                   start_time=d.get('start_time', None))
+        return cls(
+            collaborator_job_run_info=_from_dict(d, "collaborator_job_run_info", CollaboratorJobRunInfo),
+            notebook_job_run_state=_from_dict(d, "notebook_job_run_state", jobs.CleanRoomTaskRunState),
+            notebook_name=d.get("notebook_name", None),
+            output_schema_expiration_time=d.get("output_schema_expiration_time", None),
+            output_schema_name=d.get("output_schema_name", None),
+            run_duration=d.get("run_duration", None),
+            start_time=d.get("start_time", None),
+        )
 
 
 @dataclass
@@ -591,29 +684,35 @@ class CleanRoomOutputCatalog:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.status is not None: body['status'] = self.status.value
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.status is not None: body['status'] = self.status
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomOutputCatalog:
         """Deserializes the CleanRoomOutputCatalog from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            status=_enum(d, "status", CleanRoomOutputCatalogOutputCatalogStatus),
+        )
 
 
 class CleanRoomOutputCatalogOutputCatalogStatus(Enum):
 
-    CREATED = 'CREATED'
-    NOT_CREATED = 'NOT_CREATED'
-    NOT_ELIGIBLE = 'NOT_ELIGIBLE'
+    CREATED = "CREATED"
+    NOT_CREATED = "NOT_CREATED"
+    NOT_ELIGIBLE = "NOT_ELIGIBLE"
 
 
 @dataclass
@@ -649,48 +748,61 @@ class CleanRoomRemoteDetail:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
-        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
-        if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators]
+        if self.central_clean_room_id is not None:
+            body["central_clean_room_id"] = self.central_clean_room_id
+        if self.cloud_vendor is not None:
+            body["cloud_vendor"] = self.cloud_vendor
+        if self.collaborators:
+            body["collaborators"] = [v.as_dict() for v in self.collaborators]
         if self.compliance_security_profile:
-            body['compliance_security_profile'] = self.compliance_security_profile.as_dict()
-        if self.creator: body['creator'] = self.creator.as_dict()
-        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict()
-        if self.region is not None: body['region'] = self.region
+            body["compliance_security_profile"] = self.compliance_security_profile.as_dict()
+        if self.creator:
+            body["creator"] = self.creator.as_dict()
+        if self.egress_network_policy:
+            body["egress_network_policy"] = self.egress_network_policy.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
-        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
-        if self.collaborators: body['collaborators'] = self.collaborators
+        if self.central_clean_room_id is not None:
+            body["central_clean_room_id"] = self.central_clean_room_id
+        if self.cloud_vendor is not None:
+            body["cloud_vendor"] = self.cloud_vendor
+        if self.collaborators:
+            body["collaborators"] = self.collaborators
         if self.compliance_security_profile:
-            body['compliance_security_profile'] = self.compliance_security_profile
-        if self.creator: body['creator'] = self.creator
-        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy
-        if self.region is not None: body['region'] = self.region
+            body["compliance_security_profile"] = self.compliance_security_profile
+        if self.creator:
+            body["creator"] = self.creator
+        if self.egress_network_policy:
+            body["egress_network_policy"] = self.egress_network_policy
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomRemoteDetail:
         """Deserializes the CleanRoomRemoteDetail from a dictionary."""
-        return cls(central_clean_room_id=d.get('central_clean_room_id', None),
-                   cloud_vendor=d.get('cloud_vendor', None),
-                   collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator),
-                   compliance_security_profile=_from_dict(d, 'compliance_security_profile',
-                                                          ComplianceSecurityProfile),
-                   creator=_from_dict(d, 'creator', CleanRoomCollaborator),
-                   egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy),
-                   region=d.get('region', None))
+        return cls(
+            central_clean_room_id=d.get("central_clean_room_id", None),
+            cloud_vendor=d.get("cloud_vendor", None),
+            collaborators=_repeated_dict(d, "collaborators", CleanRoomCollaborator),
+            compliance_security_profile=_from_dict(d, "compliance_security_profile", ComplianceSecurityProfile),
+            creator=_from_dict(d, "creator", CleanRoomCollaborator),
+            egress_network_policy=_from_dict(d, "egress_network_policy", settings.EgressNetworkPolicy),
+            region=d.get("region", None),
+        )
 
 
 class CleanRoomStatusEnum(Enum):
 
-    ACTIVE = 'ACTIVE'
-    DELETED = 'DELETED'
-    FAILED = 'FAILED'
-    PROVISIONING = 'PROVISIONING'
+    ACTIVE = "ACTIVE"
+    DELETED = "DELETED"
+    FAILED = "FAILED"
+    PROVISIONING = "PROVISIONING"
 
 
 @dataclass
@@ -713,37 +825,43 @@ class CollaboratorJobRunInfo:
     def as_dict(self) -> dict:
         """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
-        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_alias is not None:
+            body["collaborator_alias"] = self.collaborator_alias
+        if self.collaborator_job_id is not None:
+            body["collaborator_job_id"] = self.collaborator_job_id
         if self.collaborator_job_run_id is not None:
-            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+            body["collaborator_job_run_id"] = self.collaborator_job_run_id
         if self.collaborator_task_run_id is not None:
-            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+            body["collaborator_task_run_id"] = self.collaborator_task_run_id
         if self.collaborator_workspace_id is not None:
-            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+            body["collaborator_workspace_id"] = self.collaborator_workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
-        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_alias is not None:
+            body["collaborator_alias"] = self.collaborator_alias
+        if self.collaborator_job_id is not None:
+            body["collaborator_job_id"] = self.collaborator_job_id
         if self.collaborator_job_run_id is not None:
-            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+            body["collaborator_job_run_id"] = self.collaborator_job_run_id
         if self.collaborator_task_run_id is not None:
-            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+            body["collaborator_task_run_id"] = self.collaborator_task_run_id
         if self.collaborator_workspace_id is not None:
-            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+            body["collaborator_workspace_id"] = self.collaborator_workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CollaboratorJobRunInfo:
         """Deserializes the CollaboratorJobRunInfo from a dictionary."""
-        return cls(collaborator_alias=d.get('collaborator_alias', None),
-                   collaborator_job_id=d.get('collaborator_job_id', None),
-                   collaborator_job_run_id=d.get('collaborator_job_run_id', None),
-                   collaborator_task_run_id=d.get('collaborator_task_run_id', None),
-                   collaborator_workspace_id=d.get('collaborator_workspace_id', None))
+        return cls(
+            collaborator_alias=d.get("collaborator_alias", None),
+            collaborator_job_id=d.get("collaborator_job_id", None),
+            collaborator_job_run_id=d.get("collaborator_job_run_id", None),
+            collaborator_task_run_id=d.get("collaborator_task_run_id", None),
+            collaborator_workspace_id=d.get("collaborator_workspace_id", None),
+        )
 
 
 @dataclass
@@ -760,23 +878,27 @@ def as_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.compliance_standards:
-            body['compliance_standards'] = [v.as_dict() for v in self.compliance_standards]
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+            body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards]
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        if self.compliance_standards:
+            body["compliance_standards"] = self.compliance_standards
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
         """Deserializes the ComplianceSecurityProfile from a dictionary."""
-        return cls(compliance_standards=_repeated_dict(d, 'compliance_standards',
-                                                       settings.ComplianceStandard),
-                   is_enabled=d.get('is_enabled', None))
+        return cls(
+            compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard),
+            is_enabled=d.get("is_enabled", None),
+        )
 
 
 @dataclass
@@ -786,19 +908,21 @@ class CreateCleanRoomOutputCatalogResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
+        if self.output_catalog:
+            body["output_catalog"] = self.output_catalog.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.output_catalog: body['output_catalog'] = self.output_catalog
+        if self.output_catalog:
+            body["output_catalog"] = self.output_catalog
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoomOutputCatalogResponse:
         """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary."""
-        return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog))
+        return cls(output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog))
 
 
 @dataclass
@@ -853,22 +977,28 @@ class ListCleanRoomAssetsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets: body['assets'] = [v.as_dict() for v in self.assets]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.assets:
+            body["assets"] = [v.as_dict() for v in self.assets]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets: body['assets'] = self.assets
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.assets:
+            body["assets"] = self.assets
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomAssetsResponse:
         """Deserializes the ListCleanRoomAssetsResponse from a dictionary."""
-        return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            assets=_repeated_dict(d, "assets", CleanRoomAsset),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -883,22 +1013,28 @@ class ListCleanRoomNotebookTaskRunsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.runs:
+            body["runs"] = [v.as_dict() for v in self.runs]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.runs: body['runs'] = self.runs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.runs:
+            body["runs"] = self.runs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomNotebookTaskRunsResponse:
         """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            runs=_repeated_dict(d, "runs", CleanRoomNotebookTaskRun),
+        )
 
 
 @dataclass
@@ -912,22 +1048,28 @@ class ListCleanRoomsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.clean_rooms:
+            body["clean_rooms"] = [v.as_dict() for v in self.clean_rooms]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_rooms: body['clean_rooms'] = self.clean_rooms
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.clean_rooms:
+            body["clean_rooms"] = self.clean_rooms
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
         """Deserializes the ListCleanRoomsResponse from a dictionary."""
-        return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            clean_rooms=_repeated_dict(d, "clean_rooms", CleanRoom),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -940,21 +1082,28 @@ class UpdateCleanRoomRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clean_room: body['clean_room'] = self.clean_room.as_dict()
-        if self.name is not None: body['name'] = self.name
+        if self.clean_room:
+            body["clean_room"] = self.clean_room.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_room: body['clean_room'] = self.clean_room
-        if self.name is not None: body['name'] = self.name
+        if self.clean_room:
+            body["clean_room"] = self.clean_room
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoomRequest:
         """Deserializes the UpdateCleanRoomRequest from a dictionary."""
-        return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None))
+        return cls(
+            clean_room=_from_dict(d, "clean_room", CleanRoom),
+            name=d.get("name", None),
+        )
 
 
 class CleanRoomAssetsAPI:
@@ -966,111 +1115,139 @@ def __init__(self, api_client):
 
     def create(self, clean_room_name: str, *, asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
         """Create an asset.
-        
+
         Create a clean room asset: share an asset like a notebook or table into the clean room. For each UC
         asset that is added through this method, the clean room owner must also have enough privilege on the
         asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
         access the asset. Typically, you should use a group as the clean room owner.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param asset: :class:`CleanRoomAsset` (optional)
           Metadata of the clean room asset
-        
+
         :returns: :class:`CleanRoomAsset`
         """
         body = asset.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/clean-rooms/{clean_room_name}/assets',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/clean-rooms/{clean_room_name}/assets",
+            body=body,
+            headers=headers,
+        )
         return CleanRoomAsset.from_dict(res)
 
-    def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str):
+    def delete(
+        self,
+        clean_room_name: str,
+        asset_type: CleanRoomAssetAssetType,
+        asset_full_name: str,
+    ):
         """Delete an asset.
-        
+
         Delete a clean room asset - unshare/remove the asset from the clean room
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param asset_type: :class:`CleanRoomAssetAssetType`
           The type of the asset.
         :param asset_full_name: str
           The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
-        
-        
-        """
 
-        headers = {'Accept': 'application/json', }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
-                     headers=headers)
+        """
 
-    def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType,
-            asset_full_name: str) -> CleanRoomAsset:
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}",
+            headers=headers,
+        )
+
+    def get(
+        self,
+        clean_room_name: str,
+        asset_type: CleanRoomAssetAssetType,
+        asset_full_name: str,
+    ) -> CleanRoomAsset:
         """Get an asset.
-        
+
         Get the details of a clean room asset by its type and full name.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param asset_type: :class:`CleanRoomAssetAssetType`
           The type of the asset.
         :param asset_full_name: str
           The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
-        
+
         :returns: :class:`CleanRoomAsset`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}",
+            headers=headers,
+        )
         return CleanRoomAsset.from_dict(res)
 
     def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]:
         """List assets.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param page_token: str (optional)
           Opaque pagination token to go to the next page based on the previous query.
-        
+
         :returns: Iterator over :class:`CleanRoomAsset`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/clean-rooms/{clean_room_name}/assets',
-                                query=query,
-                                headers=headers)
-            if 'assets' in json:
-                for v in json['assets']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/clean-rooms/{clean_room_name}/assets",
+                query=query,
+                headers=headers,
+            )
+            if "assets" in json:
+                for v in json["assets"]:
                     yield CleanRoomAsset.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               clean_room_name: str,
-               asset_type: CleanRoomAssetAssetType,
-               name: str,
-               *,
-               asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        clean_room_name: str,
+        asset_type: CleanRoomAssetAssetType,
+        name: str,
+        *,
+        asset: Optional[CleanRoomAsset] = None,
+    ) -> CleanRoomAsset:
         """Update an asset.
-        
+
         Update a clean room asset. For example, updating the content of a notebook; changing the shared
         partitions of a table; etc.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param asset_type: :class:`CleanRoomAssetAssetType`
@@ -1078,23 +1255,28 @@ def update(self,
         :param name: str
           A fully qualified name that uniquely identifies the asset within the clean room. This is also the
           name displayed in the clean room UI.
-          
+
           For UC securable assets (tables, volumes, etc.), the format is
           *shared_catalog*.*shared_schema*.*asset_name*
-          
+
           For notebooks, the name is the notebook file name.
         :param asset: :class:`CleanRoomAsset` (optional)
           Metadata of the clean room asset
-        
+
         :returns: :class:`CleanRoomAsset`
         """
         body = asset.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}",
+            body=body,
+            headers=headers,
+        )
         return CleanRoomAsset.from_dict(res)
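For orientation while reading these hunks, a minimal usage sketch of the asset API above. It is a sketch only: the `w.clean_room_assets` property name, the `name`/`asset_type` constructor fields, the `TABLE` enum member, and the table identifier are assumptions, not taken from this patch.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

w = WorkspaceClient()  # picks up authentication from the environment

# Share a UC table into the clean room; the clean room owner must retain
# privileges on it for as long as it stays shared.
w.clean_room_assets.create(
    clean_room_name="demo-clean-room",
    asset=CleanRoomAsset(
        name="main.default.transactions",  # illustrative table name
        asset_type=CleanRoomAssetAssetType.TABLE,  # assumed enum member
    ),
)

# list() is a generator that follows next_page_token on the caller's behalf.
for asset in w.clean_room_assets.list("demo-clean-room"):
    print(asset.name)

# Unshare the table again.
w.clean_room_assets.delete(
    "demo-clean-room",
    CleanRoomAssetAssetType.TABLE,
    "main.default.transactions",
)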
 
 
@@ -1104,16 +1286,18 @@ class CleanRoomTaskRunsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def list(self,
-             clean_room_name: str,
-             *,
-             notebook_name: Optional[str] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]:
+    def list(
+        self,
+        clean_room_name: str,
+        *,
+        notebook_name: Optional[str] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[CleanRoomNotebookTaskRun]:
         """List notebook task runs.
-        
+
         List all the historical notebook task runs in a clean room.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param notebook_name: str (optional)
@@ -1122,27 +1306,34 @@ def list(self,
           The maximum number of task runs to return
         :param page_token: str (optional)
           Opaque pagination token to go to the next page based on the previous query.
-        
+
         :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
         """
 
         query = {}
-        if notebook_name is not None: query['notebook_name'] = notebook_name
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if notebook_name is not None:
+            query["notebook_name"] = notebook_name
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/clean-rooms/{clean_room_name}/runs',
-                                query=query,
-                                headers=headers)
-            if 'runs' in json:
-                for v in json['runs']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/clean-rooms/{clean_room_name}/runs",
+                query=query,
+                headers=headers,
+            )
+            if "runs" in json:
+                for v in json["runs"]:
                     yield CleanRoomNotebookTaskRun.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
 
 class CleanRoomsAPI:
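Before the CleanRoomsAPI hunks below, a sketch of consuming the paginated task-run history reformatted just above. The `w.clean_room_task_runs` property name and the clean room name are assumptions for illustration.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The generator follows next_page_token internally; page_size only caps how
# many runs each underlying request fetches, not the total yielded.
for run in w.clean_room_task_runs.list("demo-clean-room", page_size=50):
    print(run.as_dict())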
@@ -1155,129 +1346,152 @@ def __init__(self, api_client):
 
     def create(self, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
         """Create a clean room.
-        
+
         Create a new clean room with the specified collaborators. This method is asynchronous; the returned
         name field inside the clean_room field can be used to poll the clean room status, using the
         :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
         state, with only name, owner, comment, created_at and status populated. The clean room will be usable
         once it enters an ACTIVE state.
-        
+
         The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
-        
+
         :param clean_room: :class:`CleanRoom` (optional)
-        
+
         :returns: :class:`CleanRoom`
         """
         body = clean_room.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/clean-rooms', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/clean-rooms", body=body, headers=headers)
         return CleanRoom.from_dict(res)
 
     def create_output_catalog(
-            self,
-            clean_room_name: str,
-            *,
-            output_catalog: Optional[CleanRoomOutputCatalog] = None) -> CreateCleanRoomOutputCatalogResponse:
+        self,
+        clean_room_name: str,
+        *,
+        output_catalog: Optional[CleanRoomOutputCatalog] = None,
+    ) -> CreateCleanRoomOutputCatalogResponse:
         """Create an output catalog.
-        
+
         Create the output catalog of the clean room.
-        
+
         :param clean_room_name: str
           Name of the clean room.
         :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
-        
+
         :returns: :class:`CreateCleanRoomOutputCatalogResponse`
         """
         body = output_catalog.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/clean-rooms/{clean_room_name}/output-catalogs",
+            body=body,
+            headers=headers,
+        )
         return CreateCleanRoomOutputCatalogResponse.from_dict(res)
 
     def delete(self, name: str):
         """Delete a clean room.
-        
+
         Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
         collaborators have not deleted the clean room, they will still have the clean room in their metastore,
         but it will be in a DELETED state and no operations other than deletion can be performed on it.
-        
+
         :param name: str
           Name of the clean room.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/clean-rooms/{name}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/clean-rooms/{name}", headers=headers)
 
     def get(self, name: str) -> CleanRoom:
         """Get a clean room.
-        
+
         Get the details of a clean room given its name.
-        
+
         :param name: str
-        
+
         :returns: :class:`CleanRoom`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/clean-rooms/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/clean-rooms/{name}", headers=headers)
         return CleanRoom.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[CleanRoom]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[CleanRoom]:
         """List clean rooms.
-        
+
         Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
         returned.
-        
+
         :param page_size: int (optional)
           Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
         :param page_token: str (optional)
           Opaque pagination token to go to the next page based on the previous query.
-        
+
         :returns: Iterator over :class:`CleanRoom`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/clean-rooms', query=query, headers=headers)
-            if 'clean_rooms' in json:
-                for v in json['clean_rooms']:
+            json = self._api.do("GET", "/api/2.0/clean-rooms", query=query, headers=headers)
+            if "clean_rooms" in json:
+                for v in json["clean_rooms"]:
                     yield CleanRoom.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
         """Update a clean room.
-        
+
         Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
         privilege, or be a metastore admin.
-        
+
         When the caller is a metastore admin, only the __owner__ field can be updated.
-        
+
         :param name: str
           Name of the clean room.
         :param clean_room: :class:`CleanRoom` (optional)
-        
+
         :returns: :class:`CleanRoom`
         """
         body = {}
-        if clean_room is not None: body['clean_room'] = clean_room.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/clean-rooms/{name}', body=body, headers=headers)
+        if clean_room is not None:
+            body["clean_room"] = clean_room.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/clean-rooms/{name}", body=body, headers=headers)
         return CleanRoom.from_dict(res)
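The create() docstring above describes an asynchronous flow: the clean room comes back in a PROVISIONING state and callers poll get() until it becomes ACTIVE. A hedged sketch of that loop; the `w.clean_rooms` property name, the CleanRoom constructor fields, the literal status values, and the timing budget are assumptions.

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom

w = WorkspaceClient()

# Collaborators and other required fields are omitted for brevity; a real
# request needs more than a name.
room = w.clean_rooms.create(clean_room=CleanRoom(name="demo-clean-room"))

deadline = time.time() + 20 * 60  # assumed 20-minute provisioning budget
while time.time() < deadline:
    room = w.clean_rooms.get(room.name)
    status = getattr(room.status, "value", room.status)  # enum member or plain string
    if status == "ACTIVE":  # assumed "ready" value
        break
    time.sleep(30)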
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index c16f699bb..52db6cfc0 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -48,30 +48,38 @@ class AddInstanceProfile:
     def as_dict(self) -> dict:
         """Serializes the AddInstanceProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.iam_role_arn is not None:
+            body["iam_role_arn"] = self.iam_role_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.is_meta_instance_profile is not None:
-            body['is_meta_instance_profile'] = self.is_meta_instance_profile
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["is_meta_instance_profile"] = self.is_meta_instance_profile
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.iam_role_arn is not None:
+            body["iam_role_arn"] = self.iam_role_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.is_meta_instance_profile is not None:
-            body['is_meta_instance_profile'] = self.is_meta_instance_profile
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+            body["is_meta_instance_profile"] = self.is_meta_instance_profile
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddInstanceProfile:
         """Deserializes the AddInstanceProfile from a dictionary."""
-        return cls(iam_role_arn=d.get('iam_role_arn', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   is_meta_instance_profile=d.get('is_meta_instance_profile', None),
-                   skip_validation=d.get('skip_validation', None))
+        return cls(
+            iam_role_arn=d.get("iam_role_arn", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            is_meta_instance_profile=d.get("is_meta_instance_profile", None),
+            skip_validation=d.get("skip_validation", None),
+        )
 
 
 @dataclass
@@ -102,19 +110,21 @@ class Adlsgen2Info:
     def as_dict(self) -> dict:
         """Serializes the Adlsgen2Info into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Adlsgen2Info:
         """Deserializes the Adlsgen2Info from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 @dataclass
@@ -130,21 +140,28 @@ class AutoScale:
     def as_dict(self) -> dict:
         """Serializes the AutoScale into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.max_workers is not None: body['max_workers'] = self.max_workers
-        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        if self.max_workers is not None:
+            body["max_workers"] = self.max_workers
+        if self.min_workers is not None:
+            body["min_workers"] = self.min_workers
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AutoScale into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.max_workers is not None: body['max_workers'] = self.max_workers
-        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        if self.max_workers is not None:
+            body["max_workers"] = self.max_workers
+        if self.min_workers is not None:
+            body["min_workers"] = self.min_workers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoScale:
         """Deserializes the AutoScale from a dictionary."""
-        return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None))
+        return cls(
+            max_workers=d.get("max_workers", None),
+            min_workers=d.get("min_workers", None),
+        )
 
 
 @dataclass
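As a quick illustration of the serialization contract shared by the dataclasses being reformatted in this file, a sketch using AutoScale from the hunk above; it assumes the usual generated-SDK defaults where unset fields are None.

from databricks.sdk.service.compute import AutoScale

scale = AutoScale(min_workers=2, max_workers=8)

# as_dict() emits only the fields that are set...
payload = scale.as_dict()  # {"max_workers": 8, "min_workers": 2}

# ...and from_dict() tolerates missing keys, leaving them as None.
assert AutoScale.from_dict(payload) == scale
assert AutoScale.from_dict({}) == AutoScale()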
@@ -231,58 +248,79 @@ class AwsAttributes:
     def as_dict(self) -> dict:
         """Serializes the AwsAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability.value
-        if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count
-        if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops
-        if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size
-        if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput
-        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
-        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.availability is not None:
+            body["availability"] = self.availability.value
+        if self.ebs_volume_count is not None:
+            body["ebs_volume_count"] = self.ebs_volume_count
+        if self.ebs_volume_iops is not None:
+            body["ebs_volume_iops"] = self.ebs_volume_iops
+        if self.ebs_volume_size is not None:
+            body["ebs_volume_size"] = self.ebs_volume_size
+        if self.ebs_volume_throughput is not None:
+            body["ebs_volume_throughput"] = self.ebs_volume_throughput
+        if self.ebs_volume_type is not None:
+            body["ebs_volume_type"] = self.ebs_volume_type.value
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.spot_bid_price_percent is not None:
-            body['spot_bid_price_percent'] = self.spot_bid_price_percent
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["spot_bid_price_percent"] = self.spot_bid_price_percent
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability
-        if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count
-        if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops
-        if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size
-        if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput
-        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
-        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.availability is not None:
+            body["availability"] = self.availability
+        if self.ebs_volume_count is not None:
+            body["ebs_volume_count"] = self.ebs_volume_count
+        if self.ebs_volume_iops is not None:
+            body["ebs_volume_iops"] = self.ebs_volume_iops
+        if self.ebs_volume_size is not None:
+            body["ebs_volume_size"] = self.ebs_volume_size
+        if self.ebs_volume_throughput is not None:
+            body["ebs_volume_throughput"] = self.ebs_volume_throughput
+        if self.ebs_volume_type is not None:
+            body["ebs_volume_type"] = self.ebs_volume_type
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.spot_bid_price_percent is not None:
-            body['spot_bid_price_percent'] = self.spot_bid_price_percent
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["spot_bid_price_percent"] = self.spot_bid_price_percent
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsAttributes:
         """Deserializes the AwsAttributes from a dictionary."""
-        return cls(availability=_enum(d, 'availability', AwsAvailability),
-                   ebs_volume_count=d.get('ebs_volume_count', None),
-                   ebs_volume_iops=d.get('ebs_volume_iops', None),
-                   ebs_volume_size=d.get('ebs_volume_size', None),
-                   ebs_volume_throughput=d.get('ebs_volume_throughput', None),
-                   ebs_volume_type=_enum(d, 'ebs_volume_type', EbsVolumeType),
-                   first_on_demand=d.get('first_on_demand', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   spot_bid_price_percent=d.get('spot_bid_price_percent', None),
-                   zone_id=d.get('zone_id', None))
+        return cls(
+            availability=_enum(d, "availability", AwsAvailability),
+            ebs_volume_count=d.get("ebs_volume_count", None),
+            ebs_volume_iops=d.get("ebs_volume_iops", None),
+            ebs_volume_size=d.get("ebs_volume_size", None),
+            ebs_volume_throughput=d.get("ebs_volume_throughput", None),
+            ebs_volume_type=_enum(d, "ebs_volume_type", EbsVolumeType),
+            first_on_demand=d.get("first_on_demand", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            spot_bid_price_percent=d.get("spot_bid_price_percent", None),
+            zone_id=d.get("zone_id", None),
+        )
 
 
 class AwsAvailability(Enum):
     """Availability type used for all subsequent nodes past the `first_on_demand` ones.
-    
-    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster."""
 
-    ON_DEMAND = 'ON_DEMAND'
-    SPOT = 'SPOT'
-    SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK'
+    Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.
+    """
+
+    ON_DEMAND = "ON_DEMAND"
+    SPOT = "SPOT"
+    SPOT_WITH_FALLBACK = "SPOT_WITH_FALLBACK"
 
 
 @dataclass
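One behavioral detail worth keeping in mind while scanning these hunks: as_dict() lowers enum members and nested objects to JSON-ready values, while as_shallow_dict() leaves the Python objects in place. A small sketch with the two types above; the zone_id value is illustrative only.

from databricks.sdk.service.compute import AwsAttributes, AwsAvailability

attrs = AwsAttributes(availability=AwsAvailability.SPOT, zone_id="auto")

assert attrs.as_dict()["availability"] == "SPOT"  # enum collapsed to .value
assert attrs.as_shallow_dict()["availability"] is AwsAvailability.SPOT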
@@ -313,28 +351,38 @@ class AzureAttributes:
     def as_dict(self) -> dict:
         """Serializes the AzureAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability.value
-        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
-        if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info.as_dict()
-        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        if self.availability is not None:
+            body["availability"] = self.availability.value
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
+        if self.log_analytics_info:
+            body["log_analytics_info"] = self.log_analytics_info.as_dict()
+        if self.spot_bid_max_price is not None:
+            body["spot_bid_max_price"] = self.spot_bid_max_price
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability
-        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
-        if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info
-        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        if self.availability is not None:
+            body["availability"] = self.availability
+        if self.first_on_demand is not None:
+            body["first_on_demand"] = self.first_on_demand
+        if self.log_analytics_info:
+            body["log_analytics_info"] = self.log_analytics_info
+        if self.spot_bid_max_price is not None:
+            body["spot_bid_max_price"] = self.spot_bid_max_price
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureAttributes:
         """Deserializes the AzureAttributes from a dictionary."""
-        return cls(availability=_enum(d, 'availability', AzureAvailability),
-                   first_on_demand=d.get('first_on_demand', None),
-                   log_analytics_info=_from_dict(d, 'log_analytics_info', LogAnalyticsInfo),
-                   spot_bid_max_price=d.get('spot_bid_max_price', None))
+        return cls(
+            availability=_enum(d, "availability", AzureAvailability),
+            first_on_demand=d.get("first_on_demand", None),
+            log_analytics_info=_from_dict(d, "log_analytics_info", LogAnalyticsInfo),
+            spot_bid_max_price=d.get("spot_bid_max_price", None),
+        )
 
 
 class AzureAvailability(Enum):
@@ -342,9 +390,9 @@ class AzureAvailability(Enum):
     `first_on_demand` is zero (which only happens on pool clusters), this availability type will be
     used for the entire cluster."""
 
-    ON_DEMAND_AZURE = 'ON_DEMAND_AZURE'
-    SPOT_AZURE = 'SPOT_AZURE'
-    SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE'
+    ON_DEMAND_AZURE = "ON_DEMAND_AZURE"
+    SPOT_AZURE = "SPOT_AZURE"
+    SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE"
 
 
 @dataclass
@@ -358,25 +406,33 @@ class CancelCommand:
     def as_dict(self) -> dict:
         """Serializes the CancelCommand into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.command_id is not None: body['commandId'] = self.command_id
-        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.command_id is not None:
+            body["commandId"] = self.command_id
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CancelCommand into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.command_id is not None: body['commandId'] = self.command_id
-        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.command_id is not None:
+            body["commandId"] = self.command_id
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelCommand:
         """Deserializes the CancelCommand from a dictionary."""
-        return cls(cluster_id=d.get('clusterId', None),
-                   command_id=d.get('commandId', None),
-                   context_id=d.get('contextId', None))
+        return cls(
+            cluster_id=d.get("clusterId", None),
+            command_id=d.get("commandId", None),
+            context_id=d.get("contextId", None),
+        )
 
 
 @dataclass
@@ -409,21 +465,28 @@ class ChangeClusterOwner:
     def as_dict(self) -> dict:
         """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.owner_username is not None:
+            body["owner_username"] = self.owner_username
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.owner_username is not None:
+            body["owner_username"] = self.owner_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwner:
         """Deserializes the ChangeClusterOwner from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), owner_username=d.get('owner_username', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            owner_username=d.get("owner_username", None),
+        )
 
 
 @dataclass
@@ -456,21 +519,25 @@ class ClientsTypes:
     def as_dict(self) -> dict:
         """Serializes the ClientsTypes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.jobs is not None: body['jobs'] = self.jobs
-        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        if self.jobs is not None:
+            body["jobs"] = self.jobs
+        if self.notebooks is not None:
+            body["notebooks"] = self.notebooks
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.jobs is not None: body['jobs'] = self.jobs
-        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        if self.jobs is not None:
+            body["jobs"] = self.jobs
+        if self.notebooks is not None:
+            body["notebooks"] = self.notebooks
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClientsTypes:
         """Deserializes the ClientsTypes from a dictionary."""
-        return cls(jobs=d.get('jobs', None), notebooks=d.get('notebooks', None))
+        return cls(jobs=d.get("jobs", None), notebooks=d.get("notebooks", None))
 
 
 @dataclass
@@ -481,19 +548,21 @@ class CloneCluster:
     def as_dict(self) -> dict:
         """Serializes the CloneCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        if self.source_cluster_id is not None:
+            body["source_cluster_id"] = self.source_cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CloneCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        if self.source_cluster_id is not None:
+            body["source_cluster_id"] = self.source_cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloneCluster:
         """Deserializes the CloneCluster from a dictionary."""
-        return cls(source_cluster_id=d.get('source_cluster_id', None))
+        return cls(source_cluster_id=d.get("source_cluster_id", None))
 
 
 @dataclass
@@ -503,25 +572,27 @@ class CloudProviderNodeInfo:
     def as_dict(self) -> dict:
         """Serializes the CloudProviderNodeInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.status: body['status'] = [v.value for v in self.status]
+        if self.status:
+            body["status"] = [v.value for v in self.status]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.status: body['status'] = self.status
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudProviderNodeInfo:
         """Deserializes the CloudProviderNodeInfo from a dictionary."""
-        return cls(status=_repeated_enum(d, 'status', CloudProviderNodeStatus))
+        return cls(status=_repeated_enum(d, "status", CloudProviderNodeStatus))
 
 
 class CloudProviderNodeStatus(Enum):
 
-    NOT_AVAILABLE_IN_REGION = 'NotAvailableInRegion'
-    NOT_ENABLED_ON_SUBSCRIPTION = 'NotEnabledOnSubscription'
+    NOT_AVAILABLE_IN_REGION = "NotAvailableInRegion"
+    NOT_ENABLED_ON_SUBSCRIPTION = "NotEnabledOnSubscription"
 
 
 @dataclass
@@ -541,30 +612,38 @@ class ClusterAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the ClusterAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlRequest:
         """Deserializes the ClusterAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -587,33 +666,43 @@ class ClusterAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the ClusterAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlResponse:
         """Deserializes the ClusterAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ClusterPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -782,102 +871,152 @@ def as_dict(self) -> dict:
         """Serializes the ClusterAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
         """Deserializes the ClusterAttributes from a dictionary."""
-        return cls(autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   kind=_enum(d, 'kind', Kind),
-                   node_type_id=d.get('node_type_id', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_name=d.get("cluster_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            kind=_enum(d, "kind", Kind),
+            node_type_id=d.get("node_type_id", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
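
# A minimal round-trip sketch for the serialization trio above: from_dict() rebuilds
# enum fields through the module's _enum() helper, and as_dict() flattens them back to
# their string values while omitting unset optional fields. The import path and the
# DataSecurityMode.SINGLE_USER member are assumptions for illustration; the field
# names come from ClusterAttributes itself.
from databricks.sdk.service.compute import ClusterAttributes, DataSecurityMode

raw = {
    "cluster_name": "analytics",
    "spark_version": "15.4.x-scala2.12",
    "node_type_id": "i3.xlarge",
    "data_security_mode": "SINGLE_USER",
}
attrs = ClusterAttributes.from_dict(raw)
assert attrs.data_security_mode is DataSecurityMode.SINGLE_USER
assert attrs.as_dict() == raw  # fields left as None never reach the request body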
 
 
 @dataclass
@@ -896,25 +1035,33 @@ class ClusterCompliance:
     def as_dict(self) -> dict:
         """Serializes the ClusterCompliance into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance:
         """Deserializes the ClusterCompliance from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None),
-                   is_compliant=d.get('is_compliant', None),
-                   violations=d.get('violations', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            is_compliant=d.get("is_compliant", None),
+            violations=d.get("violations", None),
+        )
 
 
 @dataclass
@@ -1177,166 +1324,258 @@ class ClusterDetails:
     def as_dict(self) -> dict:
         """Serializes the ClusterDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status.as_dict()
-        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver: body['driver'] = self.driver.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_cores is not None:
+            body["cluster_cores"] = self.cluster_cores
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_log_status:
+            body["cluster_log_status"] = self.cluster_log_status.as_dict()
+        if self.cluster_memory_mb is not None:
+            body["cluster_memory_mb"] = self.cluster_memory_mb
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.cluster_source is not None:
+            body["cluster_source"] = self.cluster_source.value
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
+        if self.driver:
+            body["driver"] = self.driver.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.executors: body['executors'] = [v.as_dict() for v in self.executors]
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
-        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.spec: body['spec'] = self.spec.as_dict()
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state is not None: body['state'] = self.state.value
-        if self.state_message is not None: body['state_message'] = self.state_message
-        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
-        if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict()
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.executors:
+            body["executors"] = [v.as_dict() for v in self.executors]
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.jdbc_port is not None:
+            body["jdbc_port"] = self.jdbc_port
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.last_restarted_time is not None:
+            body["last_restarted_time"] = self.last_restarted_time
+        if self.last_state_loss_time is not None:
+            body["last_state_loss_time"] = self.last_state_loss_time
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
+        if self.terminated_time is not None:
+            body["terminated_time"] = self.terminated_time
+        if self.termination_reason:
+            body["termination_reason"] = self.termination_reason.as_dict()
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status
-        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.docker_image: body['docker_image'] = self.docker_image
-        if self.driver: body['driver'] = self.driver
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_cores is not None:
+            body["cluster_cores"] = self.cluster_cores
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_log_status:
+            body["cluster_log_status"] = self.cluster_log_status
+        if self.cluster_memory_mb is not None:
+            body["cluster_memory_mb"] = self.cluster_memory_mb
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.cluster_source is not None:
+            body["cluster_source"] = self.cluster_source
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
+        if self.driver:
+            body["driver"] = self.driver
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.executors: body['executors'] = self.executors
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
-        if self.kind is not None: body['kind'] = self.kind
-        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
-        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.spec: body['spec'] = self.spec
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state is not None: body['state'] = self.state
-        if self.state_message is not None: body['state_message'] = self.state_message
-        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
-        if self.termination_reason: body['termination_reason'] = self.termination_reason
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.executors:
+            body["executors"] = self.executors
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.jdbc_port is not None:
+            body["jdbc_port"] = self.jdbc_port
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.last_restarted_time is not None:
+            body["last_restarted_time"] = self.last_restarted_time
+        if self.last_state_loss_time is not None:
+            body["last_state_loss_time"] = self.last_state_loss_time
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.spec:
+            body["spec"] = self.spec
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state is not None:
+            body["state"] = self.state
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
+        if self.terminated_time is not None:
+            body["terminated_time"] = self.terminated_time
+        if self.termination_reason:
+            body["termination_reason"] = self.termination_reason
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
         """Deserializes the ClusterDetails from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_cores=d.get('cluster_cores', None),
-                   cluster_id=d.get('cluster_id', None),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_log_status=_from_dict(d, 'cluster_log_status', LogSyncStatus),
-                   cluster_memory_mb=d.get('cluster_memory_mb', None),
-                   cluster_name=d.get('cluster_name', None),
-                   cluster_source=_enum(d, 'cluster_source', ClusterSource),
-                   creator_user_name=d.get('creator_user_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   default_tags=d.get('default_tags', None),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver=_from_dict(d, 'driver', SparkNode),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   executors=_repeated_dict(d, 'executors', SparkNode),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   jdbc_port=d.get('jdbc_port', None),
-                   kind=_enum(d, 'kind', Kind),
-                   last_restarted_time=d.get('last_restarted_time', None),
-                   last_state_loss_time=d.get('last_state_loss_time', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_context_id=d.get('spark_context_id', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   spec=_from_dict(d, 'spec', ClusterSpec),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   start_time=d.get('start_time', None),
-                   state=_enum(d, 'state', State),
-                   state_message=d.get('state_message', None),
-                   terminated_time=d.get('terminated_time', None),
-                   termination_reason=_from_dict(d, 'termination_reason', TerminationReason),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_cores=d.get("cluster_cores", None),
+            cluster_id=d.get("cluster_id", None),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_log_status=_from_dict(d, "cluster_log_status", LogSyncStatus),
+            cluster_memory_mb=d.get("cluster_memory_mb", None),
+            cluster_name=d.get("cluster_name", None),
+            cluster_source=_enum(d, "cluster_source", ClusterSource),
+            creator_user_name=d.get("creator_user_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            default_tags=d.get("default_tags", None),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver=_from_dict(d, "driver", SparkNode),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            executors=_repeated_dict(d, "executors", SparkNode),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            jdbc_port=d.get("jdbc_port", None),
+            kind=_enum(d, "kind", Kind),
+            last_restarted_time=d.get("last_restarted_time", None),
+            last_state_loss_time=d.get("last_state_loss_time", None),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_context_id=d.get("spark_context_id", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            spec=_from_dict(d, "spec", ClusterSpec),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            start_time=d.get("start_time", None),
+            state=_enum(d, "state", State),
+            state_message=d.get("state_message", None),
+            terminated_time=d.get("terminated_time", None),
+            termination_reason=_from_dict(d, "termination_reason", TerminationReason),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
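
# Sketch of the difference between the two serializers above: as_dict() recurses into
# nested message types (e.g. autoscale.as_dict()), while as_shallow_dict() keeps the
# nested dataclass instances untouched. AutoScale's min_workers/max_workers fields are
# assumptions drawn from the wider module, used here only for illustration.
details = ClusterDetails(
    cluster_id="0123-456789-abcdefgh",
    autoscale=AutoScale(min_workers=1, max_workers=4),
)
assert details.as_dict()["autoscale"] == {"min_workers": 1, "max_workers": 4}
assert details.as_shallow_dict()["autoscale"] is details.autoscale  # still an AutoScale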
 
 
 @dataclass
@@ -1359,32 +1598,43 @@ class ClusterEvent:
     def as_dict(self) -> dict:
         """Serializes the ClusterEvent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         if self.data_plane_event_details:
-            body['data_plane_event_details'] = self.data_plane_event_details.as_dict()
-        if self.details: body['details'] = self.details.as_dict()
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.type is not None: body['type'] = self.type.value
+            body["data_plane_event_details"] = self.data_plane_event_details.as_dict()
+        if self.details:
+            body["details"] = self.details.as_dict()
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details
-        if self.details: body['details'] = self.details
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.type is not None: body['type'] = self.type
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.data_plane_event_details:
+            body["data_plane_event_details"] = self.data_plane_event_details
+        if self.details:
+            body["details"] = self.details
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterEvent:
         """Deserializes the ClusterEvent from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None),
-                   data_plane_event_details=_from_dict(d, 'data_plane_event_details', DataPlaneEventDetails),
-                   details=_from_dict(d, 'details', EventDetails),
-                   timestamp=d.get('timestamp', None),
-                   type=_enum(d, 'type', EventType))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            data_plane_event_details=_from_dict(d, "data_plane_event_details", DataPlaneEventDetails),
+            details=_from_dict(d, "details", EventDetails),
+            timestamp=d.get("timestamp", None),
+            type=_enum(d, "type", EventType),
+        )
 
 
 @dataclass
@@ -1398,22 +1648,28 @@ class ClusterLibraryStatuses:
     def as_dict(self) -> dict:
         """Serializes the ClusterLibraryStatuses into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.library_statuses:
+            body["library_statuses"] = [v.as_dict() for v in self.library_statuses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterLibraryStatuses into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.library_statuses: body['library_statuses'] = self.library_statuses
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.library_statuses:
+            body["library_statuses"] = self.library_statuses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLibraryStatuses:
         """Deserializes the ClusterLibraryStatuses from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None),
-                   library_statuses=_repeated_dict(d, 'library_statuses', LibraryFullStatus))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            library_statuses=_repeated_dict(d, "library_statuses", LibraryFullStatus),
+        )
 
 
 @dataclass
@@ -1435,25 +1691,33 @@ class ClusterLogConf:
     def as_dict(self) -> dict:
         """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
-        if self.s3: body['s3'] = self.s3.as_dict()
-        if self.volumes: body['volumes'] = self.volumes.as_dict()
+        if self.dbfs:
+            body["dbfs"] = self.dbfs.as_dict()
+        if self.s3:
+            body["s3"] = self.s3.as_dict()
+        if self.volumes:
+            body["volumes"] = self.volumes.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbfs: body['dbfs'] = self.dbfs
-        if self.s3: body['s3'] = self.s3
-        if self.volumes: body['volumes'] = self.volumes
+        if self.dbfs:
+            body["dbfs"] = self.dbfs
+        if self.s3:
+            body["s3"] = self.s3
+        if self.volumes:
+            body["volumes"] = self.volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
-                   s3=_from_dict(d, 's3', S3StorageInfo),
-                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo))
+        return cls(
+            dbfs=_from_dict(d, "dbfs", DbfsStorageInfo),
+            s3=_from_dict(d, "s3", S3StorageInfo),
+            volumes=_from_dict(d, "volumes", VolumesStorageInfo),
+        )
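
# The if-guards above mean unset storage targets never appear in the request body:
# only the destination that is actually configured gets serialized. A minimal sketch,
# assuming VolumesStorageInfo exposes a `destination` field as elsewhere in this module.
conf = ClusterLogConf(volumes=VolumesStorageInfo(destination="/Volumes/main/default/logs"))
assert conf.as_dict() == {"volumes": {"destination": "/Volumes/main/default/logs"}}
assert ClusterLogConf.from_dict(conf.as_dict()).volumes.destination == "/Volumes/main/default/logs"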
 
 
 @dataclass
@@ -1468,33 +1732,41 @@ class ClusterPermission:
     def as_dict(self) -> dict:
         """Serializes the ClusterPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermission:
         """Deserializes the ClusterPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+        )
 
 
 class ClusterPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_ATTACH_TO = 'CAN_ATTACH_TO'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_RESTART = 'CAN_RESTART'
+    CAN_ATTACH_TO = "CAN_ATTACH_TO"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_RESTART = "CAN_RESTART"
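
# The members above serialize to their string values, so a raw API payload maps back
# onto the enum with a plain by-value lookup, which is what the module's _enum()
# helper performs when deserializing permission_level fields.
level = ClusterPermissionLevel("CAN_MANAGE")
assert level is ClusterPermissionLevel.CAN_MANAGE and level.value == "CAN_MANAGE"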
 
 
 @dataclass
@@ -1509,25 +1781,32 @@ def as_dict(self) -> dict:
         """Serializes the ClusterPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissions:
         """Deserializes the ClusterPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -1540,22 +1819,28 @@ class ClusterPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the ClusterPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsDescription:
         """Deserializes the ClusterPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+        )
 
 
 @dataclass
@@ -1569,22 +1854,27 @@ def as_dict(self) -> dict:
         """Serializes the ClusterPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsRequest:
         """Deserializes the ClusterPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlRequest),
-                   cluster_id=d.get('cluster_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlRequest),
+            cluster_id=d.get("cluster_id", None),
+        )
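
# Repeated message fields follow the same pattern: as_dict() maps every element through
# its own .as_dict(), and from_dict() rebuilds the list via _repeated_dict(). The
# user_name/permission_level fields of ClusterAccessControlRequest are assumptions
# drawn from the wider module, used here only for illustration.
req = ClusterPermissionsRequest(
    cluster_id="0123-456789-abcdefgh",
    access_control_list=[
        ClusterAccessControlRequest(
            user_name="jane@example.com",
            permission_level=ClusterPermissionLevel.CAN_RESTART,
        )
    ],
)
assert req.as_dict()["access_control_list"][0]["permission_level"] == "CAN_RESTART"
restored = ClusterPermissionsRequest.from_dict(req.as_dict())
assert restored.access_control_list[0].user_name == "jane@example.com"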
 
 
 @dataclass
@@ -1604,30 +1894,38 @@ class ClusterPolicyAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlRequest:
         """Deserializes the ClusterPolicyAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1650,33 +1948,43 @@ class ClusterPolicyAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlResponse:
         """Deserializes the ClusterPolicyAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPolicyPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ClusterPolicyPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
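The hunks above and below all follow the same generated pattern: as_dict emits a JSON-ready body that skips unset fields and recurses into nested objects, while from_dict rebuilds the dataclass from such a body. A minimal round-trip sketch, assuming these classes are importable from databricks.sdk.service.compute and treating the email address as a placeholder:

    from databricks.sdk.service.compute import (
        ClusterPolicyAccessControlResponse,
        ClusterPolicyPermission,
        ClusterPolicyPermissionLevel,
    )

    # Build a response object with one inherited CAN_USE permission.
    acl = ClusterPolicyAccessControlResponse(
        user_name="someone@example.com",  # placeholder value
        all_permissions=[
            ClusterPolicyPermission(
                inherited=True,
                permission_level=ClusterPolicyPermissionLevel.CAN_USE,
            )
        ],
    )

    body = acl.as_dict()                # nested objects are serialized recursively
    assert "group_name" not in body     # fields left as None are omitted from the body
    assert body["all_permissions"][0]["permission_level"] == "CAN_USE"

    same = ClusterPolicyAccessControlResponse.from_dict(body)  # round-trips back to the dataclass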
@@ -1691,31 +1999,39 @@ class ClusterPolicyPermission:
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermission:
         """Deserializes the ClusterPolicyPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+        )
 
 
 class ClusterPolicyPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_USE = 'CAN_USE'
+    CAN_USE = "CAN_USE"
 
 
 @dataclass
@@ -1730,26 +2046,32 @@ def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissions:
         """Deserializes the ClusterPolicyPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ClusterPolicyAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -1762,22 +2084,28 @@ class ClusterPolicyPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsDescription:
         """Deserializes the ClusterPolicyPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+        )
 
 
 @dataclass
@@ -1791,23 +2119,27 @@ def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.cluster_policy_id is not None:
+            body["cluster_policy_id"] = self.cluster_policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.cluster_policy_id is not None:
+            body["cluster_policy_id"] = self.cluster_policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest:
         """Deserializes the ClusterPolicyPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ClusterPolicyAccessControlRequest),
-                   cluster_policy_id=d.get('cluster_policy_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlRequest),
+            cluster_policy_id=d.get("cluster_policy_id", None),
+        )
 
 
 @dataclass
@@ -1831,25 +2163,33 @@ class ClusterSettingsChange:
     def as_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange:
         """Deserializes the ClusterSettingsChange from a dictionary."""
-        return cls(field=d.get('field', None),
-                   new_value=d.get('new_value', None),
-                   previous_value=d.get('previous_value', None))
+        return cls(
+            field=d.get("field", None),
+            new_value=d.get("new_value", None),
+            previous_value=d.get("previous_value", None),
+        )
 
 
 @dataclass
@@ -1871,34 +2211,42 @@ class ClusterSize:
     def as_dict(self) -> dict:
         """Serializes the ClusterSize into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSize into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSize:
         """Deserializes the ClusterSize from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), num_workers=d.get('num_workers', None))
+        return cls(
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            num_workers=d.get("num_workers", None),
+        )
 
 
 class ClusterSource(Enum):
     """Determines whether the cluster was created by a user through the UI, created by the Databricks
-    Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only."""
+    Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only.
+    """
 
-    API = 'API'
-    JOB = 'JOB'
-    MODELS = 'MODELS'
-    PIPELINE = 'PIPELINE'
-    PIPELINE_MAINTENANCE = 'PIPELINE_MAINTENANCE'
-    SQL = 'SQL'
-    UI = 'UI'
+    API = "API"
+    JOB = "JOB"
+    MODELS = "MODELS"
+    PIPELINE = "PIPELINE"
+    PIPELINE_MAINTENANCE = "PIPELINE_MAINTENANCE"
+    SQL = "SQL"
+    UI = "UI"
 
 
 @dataclass
@@ -2085,113 +2433,167 @@ def as_dict(self) -> dict:
         """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   kind=_enum(d, 'kind', Kind),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            apply_policy_default_values=d.get("apply_policy_default_values", None),
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_name=d.get("cluster_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            kind=_enum(d, "kind", Kind),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
 
 
 @dataclass
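ClusterSpec applies the same rules at a larger scale: unset fields never reach the request body, nested objects are expanded by as_dict but kept as objects by as_shallow_dict, and enum fields serialize by .value. A short sketch, under the assumptions that the import path is databricks.sdk.service.compute, that AutoScale takes min_workers/max_workers, and that the Spark version and node type strings are placeholders:

    from databricks.sdk.service.compute import AutoScale, ClusterSpec

    spec = ClusterSpec(
        spark_version="15.4.x-scala2.12",   # placeholder
        node_type_id="i3.xlarge",           # placeholder
        autoscale=AutoScale(min_workers=1, max_workers=4),
    )

    body = spec.as_dict()
    # Only the three populated fields appear; everything left as None is skipped.
    assert set(body) == {"spark_version", "node_type_id", "autoscale"}
    assert body["autoscale"] == {"min_workers": 1, "max_workers": 4}

    # as_shallow_dict keeps nested objects as-is instead of recursing into them.
    assert isinstance(spec.as_shallow_dict()["autoscale"], AutoScale)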
@@ -2210,38 +2612,48 @@ class Command:
     def as_dict(self) -> dict:
         """Serializes the Command into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.command is not None: body['command'] = self.command
-        if self.context_id is not None: body['contextId'] = self.context_id
-        if self.language is not None: body['language'] = self.language.value
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.command is not None:
+            body["command"] = self.command
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
+        if self.language is not None:
+            body["language"] = self.language.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Command into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.command is not None: body['command'] = self.command
-        if self.context_id is not None: body['contextId'] = self.context_id
-        if self.language is not None: body['language'] = self.language
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.command is not None:
+            body["command"] = self.command
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
+        if self.language is not None:
+            body["language"] = self.language
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Command:
         """Deserializes the Command from a dictionary."""
-        return cls(cluster_id=d.get('clusterId', None),
-                   command=d.get('command', None),
-                   context_id=d.get('contextId', None),
-                   language=_enum(d, 'language', Language))
+        return cls(
+            cluster_id=d.get("clusterId", None),
+            command=d.get("command", None),
+            context_id=d.get("contextId", None),
+            language=_enum(d, "language", Language),
+        )
 
 
 class CommandStatus(Enum):
 
-    CANCELLED = 'Cancelled'
-    CANCELLING = 'Cancelling'
-    ERROR = 'Error'
-    FINISHED = 'Finished'
-    QUEUED = 'Queued'
-    RUNNING = 'Running'
+    CANCELLED = "Cancelled"
+    CANCELLING = "Cancelling"
+    ERROR = "Error"
+    FINISHED = "Finished"
+    QUEUED = "Queued"
+    RUNNING = "Running"
 
 
 @dataclass
@@ -2255,32 +2667,40 @@ class CommandStatusResponse:
     def as_dict(self) -> dict:
         """Serializes the CommandStatusResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.results: body['results'] = self.results.as_dict()
-        if self.status is not None: body['status'] = self.status.value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.results:
+            body["results"] = self.results.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.results: body['results'] = self.results
-        if self.status is not None: body['status'] = self.status
+        if self.id is not None:
+            body["id"] = self.id
+        if self.results:
+            body["results"] = self.results
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommandStatusResponse:
         """Deserializes the CommandStatusResponse from a dictionary."""
-        return cls(id=d.get('id', None),
-                   results=_from_dict(d, 'results', Results),
-                   status=_enum(d, 'status', CommandStatus))
+        return cls(
+            id=d.get("id", None),
+            results=_from_dict(d, "results", Results),
+            status=_enum(d, "status", CommandStatus),
+        )
 
 
 class ContextStatus(Enum):
 
-    ERROR = 'Error'
-    PENDING = 'Pending'
-    RUNNING = 'Running'
+    ERROR = "Error"
+    PENDING = "Pending"
+    RUNNING = "Running"
 
 
 @dataclass
@@ -2292,21 +2712,25 @@ class ContextStatusResponse:
     def as_dict(self) -> dict:
         """Serializes the ContextStatusResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.status is not None: body['status'] = self.status.value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.status is not None: body['status'] = self.status
+        if self.id is not None:
+            body["id"] = self.id
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContextStatusResponse:
         """Deserializes the ContextStatusResponse from a dictionary."""
-        return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus))
+        return cls(id=d.get("id", None), status=_enum(d, "status", ContextStatus))
 
 
 @dataclass
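CommandStatus and ContextStatus carry mixed-case wire values ("Finished", "Running") rather than the SCREAMING_SNAKE_CASE strings used by the other enums in this file, and from_dict hands the raw string to the _enum helper, which appears to resolve it to the matching member by value. A small sketch, assuming the same import path and treating the id as a placeholder:

    from databricks.sdk.service.compute import CommandStatus, CommandStatusResponse

    resp = CommandStatusResponse.from_dict({"id": "abc123", "status": "Finished"})
    assert resp.status is CommandStatus.FINISHED

    # Serializing writes the enum's .value back out, so the wire string round-trips.
    assert resp.as_dict()["status"] == "Finished"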
@@ -2497,116 +2921,172 @@ def as_dict(self) -> dict:
         """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.clone_from: body['clone_from'] = self.clone_from.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.clone_from:
+            body["clone_from"] = self.clone_from.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCluster into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.clone_from: body['clone_from'] = self.clone_from
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.clone_from:
+            body["clone_from"] = self.clone_from
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
         """Deserializes the CreateCluster from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   clone_from=_from_dict(d, 'clone_from', CloneCluster),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   kind=_enum(d, 'kind', Kind),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            apply_policy_default_values=d.get("apply_policy_default_values", None),
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            clone_from=_from_dict(d, "clone_from", CloneCluster),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_name=d.get("cluster_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            kind=_enum(d, "kind", Kind),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
 
 
 @dataclass
@@ -2616,19 +3096,21 @@ class CreateClusterResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateClusterResponse:
         """Deserializes the CreateClusterResponse from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -2641,21 +3123,28 @@ class CreateContext:
     def as_dict(self) -> dict:
         """Serializes the CreateContext into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.language is not None: body['language'] = self.language.value
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.language is not None:
+            body["language"] = self.language.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateContext into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.language is not None: body['language'] = self.language
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.language is not None:
+            body["language"] = self.language
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateContext:
         """Deserializes the CreateContext from a dictionary."""
-        return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language))
+        return cls(
+            cluster_id=d.get("clusterId", None),
+            language=_enum(d, "language", Language),
+        )
 
 
 @dataclass
@@ -2723,59 +3212,83 @@ class CreateInstancePool:
     def as_dict(self) -> dict:
         """Serializes the CreateInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec.as_dict()
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
         if self.preloaded_docker_images:
-            body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
+            body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images]
         if self.preloaded_spark_versions:
-            body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
+            body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
-        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.preloaded_docker_images:
+            body["preloaded_docker_images"] = self.preloaded_docker_images
+        if self.preloaded_spark_versions:
+            body["preloaded_spark_versions"] = self.preloaded_spark_versions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePool:
         """Deserializes the CreateInstancePool from a dictionary."""
-        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                   custom_tags=d.get('custom_tags', None),
-                   disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None),
-                   preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage),
-                   preloaded_spark_versions=d.get('preloaded_spark_versions', None))
+        return cls(
+            aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes),
+            custom_tags=d.get("custom_tags", None),
+            disk_spec=_from_dict(d, "disk_spec", DiskSpec),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes),
+            idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None),
+            instance_pool_name=d.get("instance_pool_name", None),
+            max_capacity=d.get("max_capacity", None),
+            min_idle_instances=d.get("min_idle_instances", None),
+            node_type_id=d.get("node_type_id", None),
+            preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage),
+            preloaded_spark_versions=d.get("preloaded_spark_versions", None),
+        )
 
 
 @dataclass
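The hunks above only re-wrap the serialization helpers; behaviour is unchanged. For orientation, a minimal sketch (illustrative, not part of this patch) of the as_dict/from_dict round trip for CreateInstancePool, using field names visible in the hunk and hypothetical values:

from databricks.sdk.service.compute import CreateInstancePool

pool = CreateInstancePool(
    instance_pool_name="shared-pool",   # hypothetical values
    node_type_id="i3.xlarge",
    min_idle_instances=1,
    max_capacity=10,
)
body = pool.as_dict()            # only fields that were set end up in the request body
assert "disk_spec" not in body   # unset optional fields are omitted
assert CreateInstancePool.from_dict(body) == pool   # dataclass equality; round trip is lossless
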
@@ -2786,19 +3299,21 @@ class CreateInstancePoolResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse:
         """Deserializes the CreateInstancePoolResponse from a dictionary."""
-        return cls(instance_pool_id=d.get('instance_pool_id', None))
+        return cls(instance_pool_id=d.get("instance_pool_id", None))
 
 
 @dataclass
@@ -2842,39 +3357,53 @@ class CreatePolicy:
     def as_dict(self) -> dict:
         """Serializes the CreatePolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.libraries: body['libraries'] = self.libraries
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicy:
         """Deserializes the CreatePolicy from a dictionary."""
-        return cls(definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   libraries=_repeated_dict(d, 'libraries', Library),
-                   max_clusters_per_user=d.get('max_clusters_per_user', None),
-                   name=d.get('name', None),
-                   policy_family_definition_overrides=d.get('policy_family_definition_overrides', None),
-                   policy_family_id=d.get('policy_family_id', None))
+        return cls(
+            definition=d.get("definition", None),
+            description=d.get("description", None),
+            libraries=_repeated_dict(d, "libraries", Library),
+            max_clusters_per_user=d.get("max_clusters_per_user", None),
+            name=d.get("name", None),
+            policy_family_definition_overrides=d.get("policy_family_definition_overrides", None),
+            policy_family_id=d.get("policy_family_id", None),
+        )
 
 
 @dataclass
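List-valued fields follow the same pattern: as_dict serializes each element, while as_shallow_dict keeps the original objects. A hedged sketch for CreatePolicy's libraries field (Library comes from the same module; the jar path and policy definition below are made up):

from databricks.sdk.service.compute import CreatePolicy, Library

policy = CreatePolicy(
    name="pinned-runtime",
    definition='{"spark_version": {"type": "fixed", "value": "14.3.x-scala2.12"}}',
    libraries=[Library(jar="dbfs:/FileStore/jars/custom.jar")],
)
assert policy.as_dict()["libraries"] == [{"jar": "dbfs:/FileStore/jars/custom.jar"}]  # element-wise as_dict
assert policy.as_shallow_dict()["libraries"][0] is policy.libraries[0]                # objects kept as-is
assert CreatePolicy.from_dict(policy.as_dict()).libraries[0].jar.endswith("custom.jar")
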
@@ -2885,19 +3414,21 @@ class CreatePolicyResponse:
     def as_dict(self) -> dict:
         """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicyResponse:
         """Deserializes the CreatePolicyResponse from a dictionary."""
-        return cls(policy_id=d.get('policy_id', None))
+        return cls(policy_id=d.get("policy_id", None))
 
 
 @dataclass
@@ -2908,19 +3439,21 @@ class CreateResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
-        return cls(script_id=d.get('script_id', None))
+        return cls(script_id=d.get("script_id", None))
 
 
 @dataclass
@@ -2930,19 +3463,21 @@ class Created:
     def as_dict(self) -> dict:
         """Serializes the Created into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Created into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Created:
         """Deserializes the Created from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 @dataclass
@@ -2964,21 +3499,25 @@ class CustomPolicyTag:
     def as_dict(self) -> dict:
         """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomPolicyTag:
         """Deserializes the CustomPolicyTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -2998,45 +3537,55 @@ class DataPlaneEventDetails:
     def as_dict(self) -> dict:
         """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.event_type is not None: body['event_type'] = self.event_type.value
-        if self.executor_failures is not None: body['executor_failures'] = self.executor_failures
-        if self.host_id is not None: body['host_id'] = self.host_id
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.event_type is not None:
+            body["event_type"] = self.event_type.value
+        if self.executor_failures is not None:
+            body["executor_failures"] = self.executor_failures
+        if self.host_id is not None:
+            body["host_id"] = self.host_id
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.event_type is not None: body['event_type'] = self.event_type
-        if self.executor_failures is not None: body['executor_failures'] = self.executor_failures
-        if self.host_id is not None: body['host_id'] = self.host_id
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.event_type is not None:
+            body["event_type"] = self.event_type
+        if self.executor_failures is not None:
+            body["executor_failures"] = self.executor_failures
+        if self.host_id is not None:
+            body["host_id"] = self.host_id
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneEventDetails:
         """Deserializes the DataPlaneEventDetails from a dictionary."""
-        return cls(event_type=_enum(d, 'event_type', DataPlaneEventDetailsEventType),
-                   executor_failures=d.get('executor_failures', None),
-                   host_id=d.get('host_id', None),
-                   timestamp=d.get('timestamp', None))
+        return cls(
+            event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType),
+            executor_failures=d.get("executor_failures", None),
+            host_id=d.get("host_id", None),
+            timestamp=d.get("timestamp", None),
+        )
 
 
 class DataPlaneEventDetailsEventType(Enum):
     """"""
 
-    NODE_BLACKLISTED = 'NODE_BLACKLISTED'
-    NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED'
+    NODE_BLACKLISTED = "NODE_BLACKLISTED"
+    NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED"
 
 
 class DataSecurityMode(Enum):
     """Data security mode decides what data governance model to use when accessing data from a cluster.
-    
+
     The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
     choose the most appropriate access mode depending on your compute configuration. *
     `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
     Alias for `SINGLE_USER`.
-    
+
     The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
     multiple users sharing the cluster. Data governance features are not available in this mode. *
     `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
@@ -3045,26 +3594,26 @@ class DataSecurityMode(Enum):
     users. Cluster users are fully isolated so that they cannot see each other's data and
     credentials. Most data governance features are supported in this mode. But programming languages
     and cluster features might be limited.
-    
+
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
-    
+
     * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
     `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
     doesn’t have UC nor passthrough enabled."""
 
-    DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO'
-    DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED'
-    DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD'
-    LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
-    LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
-    LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD'
-    LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL'
-    NONE = 'NONE'
-    SINGLE_USER = 'SINGLE_USER'
-    USER_ISOLATION = 'USER_ISOLATION'
+    DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO"
+    DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED"
+    DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD"
+    LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH"
+    LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER"
+    LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD"
+    LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL"
+    NONE = "NONE"
+    SINGLE_USER = "SINGLE_USER"
+    USER_ISOLATION = "USER_ISOLATION"
 
 
 @dataclass
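Enum-typed fields are the one place where as_dict and as_shallow_dict diverge, as the DataPlaneEventDetails hunk above shows: the deep form emits the enum's .value string (the wire format), while the shallow form keeps the enum member. A small sketch, with a made-up host id:

from databricks.sdk.service.compute import (DataPlaneEventDetails,
                                            DataPlaneEventDetailsEventType)

details = DataPlaneEventDetails.from_dict({"event_type": "NODE_BLACKLISTED", "host_id": "host-42"})
assert details.event_type is DataPlaneEventDetailsEventType.NODE_BLACKLISTED
assert details.as_dict()["event_type"] == "NODE_BLACKLISTED"          # .value on the wire
assert details.as_shallow_dict()["event_type"] is details.event_type  # enum member preserved
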
@@ -3075,19 +3624,21 @@ class DbfsStorageInfo:
     def as_dict(self) -> dict:
         """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbfsStorageInfo:
         """Deserializes the DbfsStorageInfo from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 @dataclass
@@ -3098,19 +3649,21 @@ class DeleteCluster:
     def as_dict(self) -> dict:
         """Serializes the DeleteCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCluster:
         """Deserializes the DeleteCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -3140,19 +3693,21 @@ class DeleteInstancePool:
     def as_dict(self) -> dict:
         """Serializes the DeleteInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePool:
         """Deserializes the DeleteInstancePool from a dictionary."""
-        return cls(instance_pool_id=d.get('instance_pool_id', None))
+        return cls(instance_pool_id=d.get("instance_pool_id", None))
 
 
 @dataclass
@@ -3182,19 +3737,21 @@ class DeletePolicy:
     def as_dict(self) -> dict:
         """Serializes the DeletePolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePolicy:
         """Deserializes the DeletePolicy from a dictionary."""
-        return cls(policy_id=d.get('policy_id', None))
+        return cls(policy_id=d.get("policy_id", None))
 
 
 @dataclass
@@ -3244,21 +3801,28 @@ class DestroyContext:
     def as_dict(self) -> dict:
         """Serializes the DestroyContext into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DestroyContext into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
-        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.cluster_id is not None:
+            body["clusterId"] = self.cluster_id
+        if self.context_id is not None:
+            body["contextId"] = self.context_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DestroyContext:
         """Deserializes the DestroyContext from a dictionary."""
-        return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None))
+        return cls(
+            cluster_id=d.get("clusterId", None),
+            context_id=d.get("contextId", None),
+        )
 
 
 @dataclass
@@ -3315,31 +3879,43 @@ class DiskSpec:
     def as_dict(self) -> dict:
         """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.disk_count is not None: body['disk_count'] = self.disk_count
-        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
-        if self.disk_size is not None: body['disk_size'] = self.disk_size
-        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
-        if self.disk_type: body['disk_type'] = self.disk_type.as_dict()
+        if self.disk_count is not None:
+            body["disk_count"] = self.disk_count
+        if self.disk_iops is not None:
+            body["disk_iops"] = self.disk_iops
+        if self.disk_size is not None:
+            body["disk_size"] = self.disk_size
+        if self.disk_throughput is not None:
+            body["disk_throughput"] = self.disk_throughput
+        if self.disk_type:
+            body["disk_type"] = self.disk_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DiskSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.disk_count is not None: body['disk_count'] = self.disk_count
-        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
-        if self.disk_size is not None: body['disk_size'] = self.disk_size
-        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
-        if self.disk_type: body['disk_type'] = self.disk_type
+        if self.disk_count is not None:
+            body["disk_count"] = self.disk_count
+        if self.disk_iops is not None:
+            body["disk_iops"] = self.disk_iops
+        if self.disk_size is not None:
+            body["disk_size"] = self.disk_size
+        if self.disk_throughput is not None:
+            body["disk_throughput"] = self.disk_throughput
+        if self.disk_type:
+            body["disk_type"] = self.disk_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskSpec:
         """Deserializes the DiskSpec from a dictionary."""
-        return cls(disk_count=d.get('disk_count', None),
-                   disk_iops=d.get('disk_iops', None),
-                   disk_size=d.get('disk_size', None),
-                   disk_throughput=d.get('disk_throughput', None),
-                   disk_type=_from_dict(d, 'disk_type', DiskType))
+        return cls(
+            disk_count=d.get("disk_count", None),
+            disk_iops=d.get("disk_iops", None),
+            disk_size=d.get("disk_size", None),
+            disk_throughput=d.get("disk_throughput", None),
+            disk_type=_from_dict(d, "disk_type", DiskType),
+        )
 
 
 @dataclass
@@ -3352,35 +3928,39 @@ def as_dict(self) -> dict:
         """Serializes the DiskType into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.azure_disk_volume_type is not None:
-            body['azure_disk_volume_type'] = self.azure_disk_volume_type.value
-        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
+            body["azure_disk_volume_type"] = self.azure_disk_volume_type.value
+        if self.ebs_volume_type is not None:
+            body["ebs_volume_type"] = self.ebs_volume_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DiskType into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.azure_disk_volume_type is not None:
-            body['azure_disk_volume_type'] = self.azure_disk_volume_type
-        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
+            body["azure_disk_volume_type"] = self.azure_disk_volume_type
+        if self.ebs_volume_type is not None:
+            body["ebs_volume_type"] = self.ebs_volume_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskType:
         """Deserializes the DiskType from a dictionary."""
-        return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType),
-                   ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType))
+        return cls(
+            azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType),
+            ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType),
+        )
 
 
 class DiskTypeAzureDiskVolumeType(Enum):
 
-    PREMIUM_LRS = 'PREMIUM_LRS'
-    STANDARD_LRS = 'STANDARD_LRS'
+    PREMIUM_LRS = "PREMIUM_LRS"
+    STANDARD_LRS = "STANDARD_LRS"
 
 
 class DiskTypeEbsVolumeType(Enum):
 
-    GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
-    THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
+    GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
+    THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD"
 
 
 @dataclass
@@ -3394,21 +3974,25 @@ class DockerBasicAuth:
     def as_dict(self) -> dict:
         """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.password is not None: body['password'] = self.password
-        if self.username is not None: body['username'] = self.username
+        if self.password is not None:
+            body["password"] = self.password
+        if self.username is not None:
+            body["username"] = self.username
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.password is not None: body['password'] = self.password
-        if self.username is not None: body['username'] = self.username
+        if self.password is not None:
+            body["password"] = self.password
+        if self.username is not None:
+            body["username"] = self.username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerBasicAuth:
         """Deserializes the DockerBasicAuth from a dictionary."""
-        return cls(password=d.get('password', None), username=d.get('username', None))
+        return cls(password=d.get("password", None), username=d.get("username", None))
 
 
 @dataclass
@@ -3421,28 +4005,35 @@ class DockerImage:
     def as_dict(self) -> dict:
         """Serializes the DockerImage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.basic_auth:
+            body["basic_auth"] = self.basic_auth.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DockerImage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.basic_auth: body['basic_auth'] = self.basic_auth
-        if self.url is not None: body['url'] = self.url
+        if self.basic_auth:
+            body["basic_auth"] = self.basic_auth
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerImage:
         """Deserializes the DockerImage from a dictionary."""
-        return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None))
+        return cls(
+            basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth),
+            url=d.get("url", None),
+        )
 
 
 class EbsVolumeType(Enum):
     """The type of EBS volumes that will be launched with this cluster."""
 
-    GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
-    THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
+    GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
+    THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD"
 
 
 @dataclass
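Nested message fields behave analogously: as_dict recurses via the child's as_dict, while as_shallow_dict stores the child object itself. A sketch for DockerImage/DockerBasicAuth with placeholder credentials:

from databricks.sdk.service.compute import DockerBasicAuth, DockerImage

image = DockerImage(
    url="registry.example.com/runtime:latest",                       # hypothetical image
    basic_auth=DockerBasicAuth(username="svc-user", password="..."),
)
assert image.as_dict()["basic_auth"] == {"password": "...", "username": "svc-user"}  # nested dict
assert image.as_shallow_dict()["basic_auth"] is image.basic_auth                     # object kept
assert DockerImage.from_dict(image.as_dict()).basic_auth.username == "svc-user"
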
@@ -3632,116 +4223,172 @@ def as_dict(self) -> dict:
         """Serializes the EditCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditCluster into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditCluster:
         """Deserializes the EditCluster from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_id=d.get('cluster_id', None),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   kind=_enum(d, 'kind', Kind),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            apply_policy_default_values=d.get("apply_policy_default_values", None),
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_id=d.get("cluster_id", None),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_name=d.get("cluster_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            kind=_enum(d, "kind", Kind),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
 
 
 @dataclass
@@ -3802,39 +4449,53 @@ class EditInstancePool:
     def as_dict(self) -> dict:
         """Serializes the EditInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditInstancePool:
         """Deserializes the EditInstancePool from a dictionary."""
-        return cls(custom_tags=d.get('custom_tags', None),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None))
+        return cls(
+            custom_tags=d.get("custom_tags", None),
+            idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None),
+            instance_pool_id=d.get("instance_pool_id", None),
+            instance_pool_name=d.get("instance_pool_name", None),
+            max_capacity=d.get("max_capacity", None),
+            min_idle_instances=d.get("min_idle_instances", None),
+            node_type_id=d.get("node_type_id", None),
+        )
 
 
 @dataclass
@@ -3900,42 +4561,58 @@ class EditPolicy:
     def as_dict(self) -> dict:
         """Serializes the EditPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.libraries: body['libraries'] = self.libraries
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPolicy:
         """Deserializes the EditPolicy from a dictionary."""
-        return cls(definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   libraries=_repeated_dict(d, 'libraries', Library),
-                   max_clusters_per_user=d.get('max_clusters_per_user', None),
-                   name=d.get('name', None),
-                   policy_family_definition_overrides=d.get('policy_family_definition_overrides', None),
-                   policy_family_id=d.get('policy_family_id', None),
-                   policy_id=d.get('policy_id', None))
+        return cls(
+            definition=d.get("definition", None),
+            description=d.get("description", None),
+            libraries=_repeated_dict(d, "libraries", Library),
+            max_clusters_per_user=d.get("max_clusters_per_user", None),
+            name=d.get("name", None),
+            policy_family_definition_overrides=d.get("policy_family_definition_overrides", None),
+            policy_family_id=d.get("policy_family_id", None),
+            policy_id=d.get("policy_id", None),
+        )
 
 
 @dataclass
@@ -3988,21 +4665,28 @@ class EnforceClusterComplianceRequest:
     def as_dict(self) -> dict:
         """Serializes the EnforceClusterComplianceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest:
         """Deserializes the EnforceClusterComplianceRequest from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), validate_only=d.get('validate_only', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            validate_only=d.get("validate_only", None),
+        )
 
 
 @dataclass
@@ -4018,28 +4702,35 @@ class EnforceClusterComplianceResponse:
     def as_dict(self) -> dict:
         """Serializes the EnforceClusterComplianceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.changes: body['changes'] = [v.as_dict() for v in self.changes]
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.changes:
+            body["changes"] = [v.as_dict() for v in self.changes]
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.changes: body['changes'] = self.changes
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.changes:
+            body["changes"] = self.changes
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse:
         """Deserializes the EnforceClusterComplianceResponse from a dictionary."""
-        return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange),
-                   has_changes=d.get('has_changes', None))
+        return cls(
+            changes=_repeated_dict(d, "changes", ClusterSettingsChange),
+            has_changes=d.get("has_changes", None),
+        )
 
 
 @dataclass
 class Environment:
     """The environment entity used to preserve serverless environment side panel and jobs' environment
-    for non-notebook task. In this minimal environment spec, only pip dependencies are supported."""
+    for non-notebook task. In this minimal environment spec, only pip dependencies are supported.
+    """
 
     client: str
     """Client version used by the environment The client is the user-facing environment of the runtime.
@@ -4057,21 +4748,28 @@ class Environment:
     def as_dict(self) -> dict:
         """Serializes the Environment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.client is not None: body['client'] = self.client
-        if self.dependencies: body['dependencies'] = [v for v in self.dependencies]
+        if self.client is not None:
+            body["client"] = self.client
+        if self.dependencies:
+            body["dependencies"] = [v for v in self.dependencies]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Environment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.client is not None: body['client'] = self.client
-        if self.dependencies: body['dependencies'] = self.dependencies
+        if self.client is not None:
+            body["client"] = self.client
+        if self.dependencies:
+            body["dependencies"] = self.dependencies
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Environment:
         """Deserializes the Environment from a dictionary."""
-        return cls(client=d.get('client', None), dependencies=d.get('dependencies', None))
+        return cls(
+            client=d.get("client", None),
+            dependencies=d.get("dependencies", None),
+        )
 
 
 @dataclass
@@ -4142,121 +4840,160 @@ class EventDetails:
     def as_dict(self) -> dict:
         """Serializes the EventDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.attributes: body['attributes'] = self.attributes.as_dict()
-        if self.cause is not None: body['cause'] = self.cause.value
-        if self.cluster_size: body['cluster_size'] = self.cluster_size.as_dict()
-        if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus
-        if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers
-        if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason
-        if self.disk_size is not None: body['disk_size'] = self.disk_size
-        if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message
+        if self.attributes:
+            body["attributes"] = self.attributes.as_dict()
+        if self.cause is not None:
+            body["cause"] = self.cause.value
+        if self.cluster_size:
+            body["cluster_size"] = self.cluster_size.as_dict()
+        if self.current_num_vcpus is not None:
+            body["current_num_vcpus"] = self.current_num_vcpus
+        if self.current_num_workers is not None:
+            body["current_num_workers"] = self.current_num_workers
+        if self.did_not_expand_reason is not None:
+            body["did_not_expand_reason"] = self.did_not_expand_reason
+        if self.disk_size is not None:
+            body["disk_size"] = self.disk_size
+        if self.driver_state_message is not None:
+            body["driver_state_message"] = self.driver_state_message
         if self.enable_termination_for_node_blocklisted is not None:
-            body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
-        if self.free_space is not None: body['free_space'] = self.free_space
-        if self.init_scripts: body['init_scripts'] = self.init_scripts.as_dict()
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.job_run_name is not None: body['job_run_name'] = self.job_run_name
-        if self.previous_attributes: body['previous_attributes'] = self.previous_attributes.as_dict()
-        if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size.as_dict()
-        if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size
-        if self.reason: body['reason'] = self.reason.as_dict()
-        if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus
-        if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers
-        if self.user is not None: body['user'] = self.user
+            body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted
+        if self.free_space is not None:
+            body["free_space"] = self.free_space
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts.as_dict()
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.job_run_name is not None:
+            body["job_run_name"] = self.job_run_name
+        if self.previous_attributes:
+            body["previous_attributes"] = self.previous_attributes.as_dict()
+        if self.previous_cluster_size:
+            body["previous_cluster_size"] = self.previous_cluster_size.as_dict()
+        if self.previous_disk_size is not None:
+            body["previous_disk_size"] = self.previous_disk_size
+        if self.reason:
+            body["reason"] = self.reason.as_dict()
+        if self.target_num_vcpus is not None:
+            body["target_num_vcpus"] = self.target_num_vcpus
+        if self.target_num_workers is not None:
+            body["target_num_workers"] = self.target_num_workers
+        if self.user is not None:
+            body["user"] = self.user
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EventDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.attributes: body['attributes'] = self.attributes
-        if self.cause is not None: body['cause'] = self.cause
-        if self.cluster_size: body['cluster_size'] = self.cluster_size
-        if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus
-        if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers
-        if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason
-        if self.disk_size is not None: body['disk_size'] = self.disk_size
-        if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message
+        if self.attributes:
+            body["attributes"] = self.attributes
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.cluster_size:
+            body["cluster_size"] = self.cluster_size
+        if self.current_num_vcpus is not None:
+            body["current_num_vcpus"] = self.current_num_vcpus
+        if self.current_num_workers is not None:
+            body["current_num_workers"] = self.current_num_workers
+        if self.did_not_expand_reason is not None:
+            body["did_not_expand_reason"] = self.did_not_expand_reason
+        if self.disk_size is not None:
+            body["disk_size"] = self.disk_size
+        if self.driver_state_message is not None:
+            body["driver_state_message"] = self.driver_state_message
         if self.enable_termination_for_node_blocklisted is not None:
-            body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
-        if self.free_space is not None: body['free_space'] = self.free_space
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.job_run_name is not None: body['job_run_name'] = self.job_run_name
-        if self.previous_attributes: body['previous_attributes'] = self.previous_attributes
-        if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size
-        if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size
-        if self.reason: body['reason'] = self.reason
-        if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus
-        if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers
-        if self.user is not None: body['user'] = self.user
+            body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted
+        if self.free_space is not None:
+            body["free_space"] = self.free_space
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.job_run_name is not None:
+            body["job_run_name"] = self.job_run_name
+        if self.previous_attributes:
+            body["previous_attributes"] = self.previous_attributes
+        if self.previous_cluster_size:
+            body["previous_cluster_size"] = self.previous_cluster_size
+        if self.previous_disk_size is not None:
+            body["previous_disk_size"] = self.previous_disk_size
+        if self.reason:
+            body["reason"] = self.reason
+        if self.target_num_vcpus is not None:
+            body["target_num_vcpus"] = self.target_num_vcpus
+        if self.target_num_workers is not None:
+            body["target_num_workers"] = self.target_num_workers
+        if self.user is not None:
+            body["user"] = self.user
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EventDetails:
         """Deserializes the EventDetails from a dictionary."""
-        return cls(attributes=_from_dict(d, 'attributes', ClusterAttributes),
-                   cause=_enum(d, 'cause', EventDetailsCause),
-                   cluster_size=_from_dict(d, 'cluster_size', ClusterSize),
-                   current_num_vcpus=d.get('current_num_vcpus', None),
-                   current_num_workers=d.get('current_num_workers', None),
-                   did_not_expand_reason=d.get('did_not_expand_reason', None),
-                   disk_size=d.get('disk_size', None),
-                   driver_state_message=d.get('driver_state_message', None),
-                   enable_termination_for_node_blocklisted=d.get('enable_termination_for_node_blocklisted',
-                                                                 None),
-                   free_space=d.get('free_space', None),
-                   init_scripts=_from_dict(d, 'init_scripts', InitScriptEventDetails),
-                   instance_id=d.get('instance_id', None),
-                   job_run_name=d.get('job_run_name', None),
-                   previous_attributes=_from_dict(d, 'previous_attributes', ClusterAttributes),
-                   previous_cluster_size=_from_dict(d, 'previous_cluster_size', ClusterSize),
-                   previous_disk_size=d.get('previous_disk_size', None),
-                   reason=_from_dict(d, 'reason', TerminationReason),
-                   target_num_vcpus=d.get('target_num_vcpus', None),
-                   target_num_workers=d.get('target_num_workers', None),
-                   user=d.get('user', None))
+        return cls(
+            attributes=_from_dict(d, "attributes", ClusterAttributes),
+            cause=_enum(d, "cause", EventDetailsCause),
+            cluster_size=_from_dict(d, "cluster_size", ClusterSize),
+            current_num_vcpus=d.get("current_num_vcpus", None),
+            current_num_workers=d.get("current_num_workers", None),
+            did_not_expand_reason=d.get("did_not_expand_reason", None),
+            disk_size=d.get("disk_size", None),
+            driver_state_message=d.get("driver_state_message", None),
+            enable_termination_for_node_blocklisted=d.get("enable_termination_for_node_blocklisted", None),
+            free_space=d.get("free_space", None),
+            init_scripts=_from_dict(d, "init_scripts", InitScriptEventDetails),
+            instance_id=d.get("instance_id", None),
+            job_run_name=d.get("job_run_name", None),
+            previous_attributes=_from_dict(d, "previous_attributes", ClusterAttributes),
+            previous_cluster_size=_from_dict(d, "previous_cluster_size", ClusterSize),
+            previous_disk_size=d.get("previous_disk_size", None),
+            reason=_from_dict(d, "reason", TerminationReason),
+            target_num_vcpus=d.get("target_num_vcpus", None),
+            target_num_workers=d.get("target_num_workers", None),
+            user=d.get("user", None),
+        )
 
 
 class EventDetailsCause(Enum):
     """The cause of a change in target size."""
 
-    AUTORECOVERY = 'AUTORECOVERY'
-    AUTOSCALE = 'AUTOSCALE'
-    REPLACE_BAD_NODES = 'REPLACE_BAD_NODES'
-    USER_REQUEST = 'USER_REQUEST'
+    AUTORECOVERY = "AUTORECOVERY"
+    AUTOSCALE = "AUTOSCALE"
+    REPLACE_BAD_NODES = "REPLACE_BAD_NODES"
+    USER_REQUEST = "USER_REQUEST"
 
 
 class EventType(Enum):
 
-    ADD_NODES_FAILED = 'ADD_NODES_FAILED'
-    AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE'
-    AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF'
-    AUTOSCALING_FAILED = 'AUTOSCALING_FAILED'
-    AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT'
-    CREATING = 'CREATING'
-    DBFS_DOWN = 'DBFS_DOWN'
-    DID_NOT_EXPAND_DISK = 'DID_NOT_EXPAND_DISK'
-    DRIVER_HEALTHY = 'DRIVER_HEALTHY'
-    DRIVER_NOT_RESPONDING = 'DRIVER_NOT_RESPONDING'
-    DRIVER_UNAVAILABLE = 'DRIVER_UNAVAILABLE'
-    EDITED = 'EDITED'
-    EXPANDED_DISK = 'EXPANDED_DISK'
-    FAILED_TO_EXPAND_DISK = 'FAILED_TO_EXPAND_DISK'
-    INIT_SCRIPTS_FINISHED = 'INIT_SCRIPTS_FINISHED'
-    INIT_SCRIPTS_STARTED = 'INIT_SCRIPTS_STARTED'
-    METASTORE_DOWN = 'METASTORE_DOWN'
-    NODES_LOST = 'NODES_LOST'
-    NODE_BLACKLISTED = 'NODE_BLACKLISTED'
-    NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED'
-    PINNED = 'PINNED'
-    RESIZING = 'RESIZING'
-    RESTARTING = 'RESTARTING'
-    RUNNING = 'RUNNING'
-    SPARK_EXCEPTION = 'SPARK_EXCEPTION'
-    STARTING = 'STARTING'
-    TERMINATING = 'TERMINATING'
-    UNPINNED = 'UNPINNED'
-    UPSIZE_COMPLETED = 'UPSIZE_COMPLETED'
+    ADD_NODES_FAILED = "ADD_NODES_FAILED"
+    AUTOMATIC_CLUSTER_UPDATE = "AUTOMATIC_CLUSTER_UPDATE"
+    AUTOSCALING_BACKOFF = "AUTOSCALING_BACKOFF"
+    AUTOSCALING_FAILED = "AUTOSCALING_FAILED"
+    AUTOSCALING_STATS_REPORT = "AUTOSCALING_STATS_REPORT"
+    CREATING = "CREATING"
+    DBFS_DOWN = "DBFS_DOWN"
+    DID_NOT_EXPAND_DISK = "DID_NOT_EXPAND_DISK"
+    DRIVER_HEALTHY = "DRIVER_HEALTHY"
+    DRIVER_NOT_RESPONDING = "DRIVER_NOT_RESPONDING"
+    DRIVER_UNAVAILABLE = "DRIVER_UNAVAILABLE"
+    EDITED = "EDITED"
+    EXPANDED_DISK = "EXPANDED_DISK"
+    FAILED_TO_EXPAND_DISK = "FAILED_TO_EXPAND_DISK"
+    INIT_SCRIPTS_FINISHED = "INIT_SCRIPTS_FINISHED"
+    INIT_SCRIPTS_STARTED = "INIT_SCRIPTS_STARTED"
+    METASTORE_DOWN = "METASTORE_DOWN"
+    NODES_LOST = "NODES_LOST"
+    NODE_BLACKLISTED = "NODE_BLACKLISTED"
+    NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED"
+    PINNED = "PINNED"
+    RESIZING = "RESIZING"
+    RESTARTING = "RESTARTING"
+    RUNNING = "RUNNING"
+    SPARK_EXCEPTION = "SPARK_EXCEPTION"
+    STARTING = "STARTING"
+    TERMINATING = "TERMINATING"
+    UNPINNED = "UNPINNED"
+    UPSIZE_COMPLETED = "UPSIZE_COMPLETED"
 
 
 @dataclass
@@ -4295,47 +5032,58 @@ class GcpAttributes:
     def as_dict(self) -> dict:
         """Serializes the GcpAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability.value
-        if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size
+        if self.availability is not None:
+            body["availability"] = self.availability.value
+        if self.boot_disk_size is not None:
+            body["boot_disk_size"] = self.boot_disk_size
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+            body["google_service_account"] = self.google_service_account
+        if self.local_ssd_count is not None:
+            body["local_ssd_count"] = self.local_ssd_count
         if self.use_preemptible_executors is not None:
-            body['use_preemptible_executors'] = self.use_preemptible_executors
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["use_preemptible_executors"] = self.use_preemptible_executors
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability
-        if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size
+        if self.availability is not None:
+            body["availability"] = self.availability
+        if self.boot_disk_size is not None:
+            body["boot_disk_size"] = self.boot_disk_size
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+            body["google_service_account"] = self.google_service_account
+        if self.local_ssd_count is not None:
+            body["local_ssd_count"] = self.local_ssd_count
         if self.use_preemptible_executors is not None:
-            body['use_preemptible_executors'] = self.use_preemptible_executors
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["use_preemptible_executors"] = self.use_preemptible_executors
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpAttributes:
         """Deserializes the GcpAttributes from a dictionary."""
-        return cls(availability=_enum(d, 'availability', GcpAvailability),
-                   boot_disk_size=d.get('boot_disk_size', None),
-                   google_service_account=d.get('google_service_account', None),
-                   local_ssd_count=d.get('local_ssd_count', None),
-                   use_preemptible_executors=d.get('use_preemptible_executors', None),
-                   zone_id=d.get('zone_id', None))
+        return cls(
+            availability=_enum(d, "availability", GcpAvailability),
+            boot_disk_size=d.get("boot_disk_size", None),
+            google_service_account=d.get("google_service_account", None),
+            local_ssd_count=d.get("local_ssd_count", None),
+            use_preemptible_executors=d.get("use_preemptible_executors", None),
+            zone_id=d.get("zone_id", None),
+        )
 
 
 class GcpAvailability(Enum):
     """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or
-    preemptible VMs with a fallback to on-demand VMs if the former is unavailable."""
+    preemptible VMs with a fallback to on-demand VMs if the former is unavailable.
+    """
 
-    ON_DEMAND_GCP = 'ON_DEMAND_GCP'
-    PREEMPTIBLE_GCP = 'PREEMPTIBLE_GCP'
-    PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP'
+    ON_DEMAND_GCP = "ON_DEMAND_GCP"
+    PREEMPTIBLE_GCP = "PREEMPTIBLE_GCP"
+    PREEMPTIBLE_WITH_FALLBACK_GCP = "PREEMPTIBLE_WITH_FALLBACK_GCP"
 
 
 @dataclass
@@ -4346,19 +5094,21 @@ class GcsStorageInfo:
     def as_dict(self) -> dict:
         """Serializes the GcsStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo:
         """Deserializes the GcsStorageInfo from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 @dataclass
@@ -4375,21 +5125,28 @@ class GetClusterComplianceResponse:
     def as_dict(self) -> dict:
         """Serializes the GetClusterComplianceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse:
         """Deserializes the GetClusterComplianceResponse from a dictionary."""
-        return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
+        return cls(
+            is_compliant=d.get("is_compliant", None),
+            violations=d.get("violations", None),
+        )
 
 
 @dataclass
@@ -4400,19 +5157,21 @@ class GetClusterPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetClusterPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPermissionLevelsResponse:
         """Deserializes the GetClusterPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', ClusterPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPermissionsDescription))
 
 
 @dataclass
@@ -4423,20 +5182,21 @@ class GetClusterPolicyPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetClusterPolicyPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPolicyPermissionLevelsResponse:
         """Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary."""
-        return cls(
-            permission_levels=_repeated_dict(d, 'permission_levels', ClusterPolicyPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPolicyPermissionsDescription))
 
 
 @dataclass
@@ -4468,44 +5228,60 @@ class GetEvents:
     def as_dict(self) -> dict:
         """Serializes the GetEvents into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.event_types: body['event_types'] = [v.value for v in self.event_types]
-        if self.limit is not None: body['limit'] = self.limit
-        if self.offset is not None: body['offset'] = self.offset
-        if self.order is not None: body['order'] = self.order.value
-        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.event_types:
+            body["event_types"] = [v.value for v in self.event_types]
+        if self.limit is not None:
+            body["limit"] = self.limit
+        if self.offset is not None:
+            body["offset"] = self.offset
+        if self.order is not None:
+            body["order"] = self.order.value
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetEvents into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.event_types: body['event_types'] = self.event_types
-        if self.limit is not None: body['limit'] = self.limit
-        if self.offset is not None: body['offset'] = self.offset
-        if self.order is not None: body['order'] = self.order
-        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.event_types:
+            body["event_types"] = self.event_types
+        if self.limit is not None:
+            body["limit"] = self.limit
+        if self.offset is not None:
+            body["offset"] = self.offset
+        if self.order is not None:
+            body["order"] = self.order
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetEvents:
         """Deserializes the GetEvents from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None),
-                   end_time=d.get('end_time', None),
-                   event_types=_repeated_enum(d, 'event_types', EventType),
-                   limit=d.get('limit', None),
-                   offset=d.get('offset', None),
-                   order=_enum(d, 'order', GetEventsOrder),
-                   start_time=d.get('start_time', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            end_time=d.get("end_time", None),
+            event_types=_repeated_enum(d, "event_types", EventType),
+            limit=d.get("limit", None),
+            offset=d.get("offset", None),
+            order=_enum(d, "order", GetEventsOrder),
+            start_time=d.get("start_time", None),
+        )
 
 
 class GetEventsOrder(Enum):
     """The order to list events in; either "ASC" or "DESC". Defaults to "DESC"."""
 
-    ASC = 'ASC'
-    DESC = 'DESC'
+    ASC = "ASC"
+    DESC = "DESC"
 
 
 @dataclass
@@ -4523,25 +5299,33 @@ class GetEventsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetEventsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.events: body['events'] = [v.as_dict() for v in self.events]
-        if self.next_page: body['next_page'] = self.next_page.as_dict()
-        if self.total_count is not None: body['total_count'] = self.total_count
+        if self.events:
+            body["events"] = [v.as_dict() for v in self.events]
+        if self.next_page:
+            body["next_page"] = self.next_page.as_dict()
+        if self.total_count is not None:
+            body["total_count"] = self.total_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.events: body['events'] = self.events
-        if self.next_page: body['next_page'] = self.next_page
-        if self.total_count is not None: body['total_count'] = self.total_count
+        if self.events:
+            body["events"] = self.events
+        if self.next_page:
+            body["next_page"] = self.next_page
+        if self.total_count is not None:
+            body["total_count"] = self.total_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetEventsResponse:
         """Deserializes the GetEventsResponse from a dictionary."""
-        return cls(events=_repeated_dict(d, 'events', ClusterEvent),
-                   next_page=_from_dict(d, 'next_page', GetEvents),
-                   total_count=d.get('total_count', None))
+        return cls(
+            events=_repeated_dict(d, "events", ClusterEvent),
+            next_page=_from_dict(d, "next_page", GetEvents),
+            total_count=d.get("total_count", None),
+        )
 
 
 @dataclass
@@ -4632,74 +5416,108 @@ class GetInstancePool:
     def as_dict(self) -> dict:
         """Serializes the GetInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec.as_dict()
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
         if self.preloaded_docker_images:
-            body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
+            body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images]
         if self.preloaded_spark_versions:
-            body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-        if self.state is not None: body['state'] = self.state.value
-        if self.stats: body['stats'] = self.stats.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
+            body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions]
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.stats:
+            body["stats"] = self.stats.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
-        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
-        if self.state is not None: body['state'] = self.state
-        if self.stats: body['stats'] = self.stats
-        if self.status: body['status'] = self.status
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.preloaded_docker_images:
+            body["preloaded_docker_images"] = self.preloaded_docker_images
+        if self.preloaded_spark_versions:
+            body["preloaded_spark_versions"] = self.preloaded_spark_versions
+        if self.state is not None:
+            body["state"] = self.state
+        if self.stats:
+            body["stats"] = self.stats
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePool:
         """Deserializes the GetInstancePool from a dictionary."""
-        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                   custom_tags=d.get('custom_tags', None),
-                   default_tags=d.get('default_tags', None),
-                   disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None),
-                   preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage),
-                   preloaded_spark_versions=d.get('preloaded_spark_versions', None),
-                   state=_enum(d, 'state', InstancePoolState),
-                   stats=_from_dict(d, 'stats', InstancePoolStats),
-                   status=_from_dict(d, 'status', InstancePoolStatus))
+        return cls(
+            aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes),
+            custom_tags=d.get("custom_tags", None),
+            default_tags=d.get("default_tags", None),
+            disk_spec=_from_dict(d, "disk_spec", DiskSpec),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes),
+            idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None),
+            instance_pool_id=d.get("instance_pool_id", None),
+            instance_pool_name=d.get("instance_pool_name", None),
+            max_capacity=d.get("max_capacity", None),
+            min_idle_instances=d.get("min_idle_instances", None),
+            node_type_id=d.get("node_type_id", None),
+            preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage),
+            preloaded_spark_versions=d.get("preloaded_spark_versions", None),
+            state=_enum(d, "state", InstancePoolState),
+            stats=_from_dict(d, "stats", InstancePoolStats),
+            status=_from_dict(d, "status", InstancePoolStatus),
+        )
 
 
 @dataclass
@@ -4710,20 +5528,21 @@ class GetInstancePoolPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetInstancePoolPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePoolPermissionLevelsResponse:
         """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary."""
-        return cls(
-            permission_levels=_repeated_dict(d, 'permission_levels', InstancePoolPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", InstancePoolPermissionsDescription))
 
 
 @dataclass
@@ -4734,19 +5553,21 @@ class GetSparkVersionsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetSparkVersionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.versions: body['versions'] = [v.as_dict() for v in self.versions]
+        if self.versions:
+            body["versions"] = [v.as_dict() for v in self.versions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.versions: body['versions'] = self.versions
+        if self.versions:
+            body["versions"] = self.versions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSparkVersionsResponse:
         """Deserializes the GetSparkVersionsResponse from a dictionary."""
-        return cls(versions=_repeated_dict(d, 'versions', SparkVersion))
+        return cls(versions=_repeated_dict(d, "versions", SparkVersion))
 
 
 @dataclass
@@ -4774,28 +5595,38 @@ class GlobalInitScriptCreateRequest:
     def as_dict(self) -> dict:
         """Serializes the GlobalInitScriptCreateRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptCreateRequest:
         """Deserializes the GlobalInitScriptCreateRequest from a dictionary."""
-        return cls(enabled=d.get('enabled', None),
-                   name=d.get('name', None),
-                   position=d.get('position', None),
-                   script=d.get('script', None))
+        return cls(
+            enabled=d.get("enabled", None),
+            name=d.get("name", None),
+            position=d.get("position", None),
+            script=d.get("script", None),
+        )
 
 
 @dataclass
@@ -4828,40 +5659,58 @@ class GlobalInitScriptDetails:
     def as_dict(self) -> dict:
         """Serializes the GlobalInitScriptDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script_id is not None: body['script_id'] = self.script_id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script_id is not None: body['script_id'] = self.script_id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetails:
         """Deserializes the GlobalInitScriptDetails from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   enabled=d.get('enabled', None),
-                   name=d.get('name', None),
-                   position=d.get('position', None),
-                   script_id=d.get('script_id', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            enabled=d.get("enabled", None),
+            name=d.get("name", None),
+            position=d.get("position", None),
+            script_id=d.get("script_id", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -4897,43 +5746,63 @@ class GlobalInitScriptDetailsWithContent:
     def as_dict(self) -> dict:
         """Serializes the GlobalInitScriptDetailsWithContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
-        if self.script_id is not None: body['script_id'] = self.script_id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
-        if self.script_id is not None: body['script_id'] = self.script_id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetailsWithContent:
         """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   enabled=d.get('enabled', None),
-                   name=d.get('name', None),
-                   position=d.get('position', None),
-                   script=d.get('script', None),
-                   script_id=d.get('script_id', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            enabled=d.get("enabled", None),
+            name=d.get("name", None),
+            position=d.get("position", None),
+            script=d.get("script", None),
+            script_id=d.get("script_id", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -4964,31 +5833,43 @@ class GlobalInitScriptUpdateRequest:
     def as_dict(self) -> dict:
         """Serializes the GlobalInitScriptUpdateRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
-        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.script is not None: body['script'] = self.script
-        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.script is not None:
+            body["script"] = self.script
+        if self.script_id is not None:
+            body["script_id"] = self.script_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptUpdateRequest:
         """Deserializes the GlobalInitScriptUpdateRequest from a dictionary."""
-        return cls(enabled=d.get('enabled', None),
-                   name=d.get('name', None),
-                   position=d.get('position', None),
-                   script=d.get('script', None),
-                   script_id=d.get('script_id', None))
+        return cls(
+            enabled=d.get("enabled", None),
+            name=d.get("name", None),
+            position=d.get("position", None),
+            script=d.get("script", None),
+            script_id=d.get("script_id", None),
+        )
 
 
 @dataclass
@@ -5005,25 +5886,33 @@ class InitScriptEventDetails:
     def as_dict(self) -> dict:
         """Serializes the InitScriptEventDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster: body['cluster'] = [v.as_dict() for v in self.cluster]
-        if self.global_: body['global'] = [v.as_dict() for v in self.global_]
-        if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
+        if self.cluster:
+            body["cluster"] = [v.as_dict() for v in self.cluster]
+        if self.global_:
+            body["global"] = [v.as_dict() for v in self.global_]
+        if self.reported_for_node is not None:
+            body["reported_for_node"] = self.reported_for_node
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster: body['cluster'] = self.cluster
-        if self.global_: body['global'] = self.global_
-        if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
+        if self.cluster:
+            body["cluster"] = self.cluster
+        if self.global_:
+            body["global"] = self.global_
+        if self.reported_for_node is not None:
+            body["reported_for_node"] = self.reported_for_node
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptEventDetails:
         """Deserializes the InitScriptEventDetails from a dictionary."""
-        return cls(cluster=_repeated_dict(d, 'cluster', InitScriptInfoAndExecutionDetails),
-                   global_=_repeated_dict(d, 'global', InitScriptInfoAndExecutionDetails),
-                   reported_for_node=d.get('reported_for_node', None))
+        return cls(
+            cluster=_repeated_dict(d, "cluster", InitScriptInfoAndExecutionDetails),
+            global_=_repeated_dict(d, "global", InitScriptInfoAndExecutionDetails),
+            reported_for_node=d.get("reported_for_node", None),
+        )
 
 
 @dataclass
@@ -5040,38 +5929,44 @@ class InitScriptExecutionDetails:
     def as_dict(self) -> dict:
         """Serializes the InitScriptExecutionDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
         if self.execution_duration_seconds is not None:
-            body['execution_duration_seconds'] = self.execution_duration_seconds
-        if self.status is not None: body['status'] = self.status.value
+            body["execution_duration_seconds"] = self.execution_duration_seconds
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InitScriptExecutionDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
         if self.execution_duration_seconds is not None:
-            body['execution_duration_seconds'] = self.execution_duration_seconds
-        if self.status is not None: body['status'] = self.status
+            body["execution_duration_seconds"] = self.execution_duration_seconds
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptExecutionDetails:
         """Deserializes the InitScriptExecutionDetails from a dictionary."""
-        return cls(error_message=d.get('error_message', None),
-                   execution_duration_seconds=d.get('execution_duration_seconds', None),
-                   status=_enum(d, 'status', InitScriptExecutionDetailsStatus))
+        return cls(
+            error_message=d.get("error_message", None),
+            execution_duration_seconds=d.get("execution_duration_seconds", None),
+            status=_enum(d, "status", InitScriptExecutionDetailsStatus),
+        )
 
 
 class InitScriptExecutionDetailsStatus(Enum):
     """The current status of the script"""
 
-    FAILED_EXECUTION = 'FAILED_EXECUTION'
-    FAILED_FETCH = 'FAILED_FETCH'
-    NOT_EXECUTED = 'NOT_EXECUTED'
-    SKIPPED = 'SKIPPED'
-    SUCCEEDED = 'SUCCEEDED'
-    UNKNOWN = 'UNKNOWN'
+    FAILED_EXECUTION = "FAILED_EXECUTION"
+    FAILED_FETCH = "FAILED_FETCH"
+    NOT_EXECUTED = "NOT_EXECUTED"
+    SKIPPED = "SKIPPED"
+    SUCCEEDED = "SUCCEEDED"
+    UNKNOWN = "UNKNOWN"
 
 
 @dataclass
@@ -5108,37 +6003,53 @@ class InitScriptInfo:
     def as_dict(self) -> dict:
         """Serializes the InitScriptInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.abfss: body['abfss'] = self.abfss.as_dict()
-        if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
-        if self.file: body['file'] = self.file.as_dict()
-        if self.gcs: body['gcs'] = self.gcs.as_dict()
-        if self.s3: body['s3'] = self.s3.as_dict()
-        if self.volumes: body['volumes'] = self.volumes.as_dict()
-        if self.workspace: body['workspace'] = self.workspace.as_dict()
+        if self.abfss:
+            body["abfss"] = self.abfss.as_dict()
+        if self.dbfs:
+            body["dbfs"] = self.dbfs.as_dict()
+        if self.file:
+            body["file"] = self.file.as_dict()
+        if self.gcs:
+            body["gcs"] = self.gcs.as_dict()
+        if self.s3:
+            body["s3"] = self.s3.as_dict()
+        if self.volumes:
+            body["volumes"] = self.volumes.as_dict()
+        if self.workspace:
+            body["workspace"] = self.workspace.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.abfss: body['abfss'] = self.abfss
-        if self.dbfs: body['dbfs'] = self.dbfs
-        if self.file: body['file'] = self.file
-        if self.gcs: body['gcs'] = self.gcs
-        if self.s3: body['s3'] = self.s3
-        if self.volumes: body['volumes'] = self.volumes
-        if self.workspace: body['workspace'] = self.workspace
+        if self.abfss:
+            body["abfss"] = self.abfss
+        if self.dbfs:
+            body["dbfs"] = self.dbfs
+        if self.file:
+            body["file"] = self.file
+        if self.gcs:
+            body["gcs"] = self.gcs
+        if self.s3:
+            body["s3"] = self.s3
+        if self.volumes:
+            body["volumes"] = self.volumes
+        if self.workspace:
+            body["workspace"] = self.workspace
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfo:
         """Deserializes the InitScriptInfo from a dictionary."""
-        return cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info),
-                   dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
-                   file=_from_dict(d, 'file', LocalFileInfo),
-                   gcs=_from_dict(d, 'gcs', GcsStorageInfo),
-                   s3=_from_dict(d, 's3', S3StorageInfo),
-                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo),
-                   workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo))
+        return cls(
+            abfss=_from_dict(d, "abfss", Adlsgen2Info),
+            dbfs=_from_dict(d, "dbfs", DbfsStorageInfo),
+            file=_from_dict(d, "file", LocalFileInfo),
+            gcs=_from_dict(d, "gcs", GcsStorageInfo),
+            s3=_from_dict(d, "s3", S3StorageInfo),
+            volumes=_from_dict(d, "volumes", VolumesStorageInfo),
+            workspace=_from_dict(d, "workspace", WorkspaceStorageInfo),
+        )
 
 
 @dataclass
@@ -5152,22 +6063,28 @@ class InitScriptInfoAndExecutionDetails:
     def as_dict(self) -> dict:
         """Serializes the InitScriptInfoAndExecutionDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.execution_details: body['execution_details'] = self.execution_details.as_dict()
-        if self.script: body['script'] = self.script.as_dict()
+        if self.execution_details:
+            body["execution_details"] = self.execution_details.as_dict()
+        if self.script:
+            body["script"] = self.script.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.execution_details: body['execution_details'] = self.execution_details
-        if self.script: body['script'] = self.script
+        if self.execution_details:
+            body["execution_details"] = self.execution_details
+        if self.script:
+            body["script"] = self.script
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfoAndExecutionDetails:
         """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary."""
-        return cls(execution_details=_from_dict(d, 'execution_details', InitScriptExecutionDetails),
-                   script=_from_dict(d, 'script', InitScriptInfo))
+        return cls(
+            execution_details=_from_dict(d, "execution_details", InitScriptExecutionDetails),
+            script=_from_dict(d, "script", InitScriptInfo),
+        )
 
 
 @dataclass
@@ -5181,21 +6098,28 @@ class InstallLibraries:
     def as_dict(self) -> dict:
         """Serializes the InstallLibraries into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.libraries: body['libraries'] = self.libraries
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.libraries:
+            body["libraries"] = self.libraries
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallLibraries:
         """Deserializes the InstallLibraries from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            libraries=_repeated_dict(d, "libraries", Library),
+        )
 
 
 @dataclass
@@ -5234,30 +6158,38 @@ class InstancePoolAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlRequest:
         """Deserializes the InstancePoolAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -5280,33 +6212,43 @@ class InstancePoolAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlResponse:
         """Deserializes the InstancePoolAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', InstancePoolPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", InstancePoolPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -5397,74 +6339,108 @@ class InstancePoolAndStats:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAndStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec.as_dict()
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
         if self.preloaded_docker_images:
-            body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
+            body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images]
         if self.preloaded_spark_versions:
-            body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
-        if self.state is not None: body['state'] = self.state.value
-        if self.stats: body['stats'] = self.stats.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
+            body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions]
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.stats:
+            body["stats"] = self.stats.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.disk_spec: body['disk_spec'] = self.disk_spec
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.disk_spec:
+            body["disk_spec"] = self.disk_spec
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
         if self.idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
-        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
-        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
-        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
-        if self.state is not None: body['state'] = self.state
-        if self.stats: body['stats'] = self.stats
-        if self.status: body['status'] = self.status
+            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_name is not None:
+            body["instance_pool_name"] = self.instance_pool_name
+        if self.max_capacity is not None:
+            body["max_capacity"] = self.max_capacity
+        if self.min_idle_instances is not None:
+            body["min_idle_instances"] = self.min_idle_instances
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.preloaded_docker_images:
+            body["preloaded_docker_images"] = self.preloaded_docker_images
+        if self.preloaded_spark_versions:
+            body["preloaded_spark_versions"] = self.preloaded_spark_versions
+        if self.state is not None:
+            body["state"] = self.state
+        if self.stats:
+            body["stats"] = self.stats
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAndStats:
         """Deserializes the InstancePoolAndStats from a dictionary."""
-        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes),
-                   custom_tags=d.get('custom_tags', None),
-                   default_tags=d.get('default_tags', None),
-                   disk_spec=_from_dict(d, 'disk_spec', DiskSpec),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes),
-                   idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   instance_pool_name=d.get('instance_pool_name', None),
-                   max_capacity=d.get('max_capacity', None),
-                   min_idle_instances=d.get('min_idle_instances', None),
-                   node_type_id=d.get('node_type_id', None),
-                   preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage),
-                   preloaded_spark_versions=d.get('preloaded_spark_versions', None),
-                   state=_enum(d, 'state', InstancePoolState),
-                   stats=_from_dict(d, 'stats', InstancePoolStats),
-                   status=_from_dict(d, 'status', InstancePoolStatus))
+        return cls(
+            aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes),
+            custom_tags=d.get("custom_tags", None),
+            default_tags=d.get("default_tags", None),
+            disk_spec=_from_dict(d, "disk_spec", DiskSpec),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes),
+            idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None),
+            instance_pool_id=d.get("instance_pool_id", None),
+            instance_pool_name=d.get("instance_pool_name", None),
+            max_capacity=d.get("max_capacity", None),
+            min_idle_instances=d.get("min_idle_instances", None),
+            node_type_id=d.get("node_type_id", None),
+            preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage),
+            preloaded_spark_versions=d.get("preloaded_spark_versions", None),
+            state=_enum(d, "state", InstancePoolState),
+            stats=_from_dict(d, "stats", InstancePoolStats),
+            status=_from_dict(d, "status", InstancePoolStatus),
+        )
 
 
 @dataclass
@@ -5497,36 +6473,43 @@ class InstancePoolAwsAttributes:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAwsAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability.value
+        if self.availability is not None:
+            body["availability"] = self.availability.value
         if self.spot_bid_price_percent is not None:
-            body['spot_bid_price_percent'] = self.spot_bid_price_percent
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["spot_bid_price_percent"] = self.spot_bid_price_percent
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability
+        if self.availability is not None:
+            body["availability"] = self.availability
         if self.spot_bid_price_percent is not None:
-            body['spot_bid_price_percent'] = self.spot_bid_price_percent
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+            body["spot_bid_price_percent"] = self.spot_bid_price_percent
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAwsAttributes:
         """Deserializes the InstancePoolAwsAttributes from a dictionary."""
-        return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability),
-                   spot_bid_price_percent=d.get('spot_bid_price_percent', None),
-                   zone_id=d.get('zone_id', None))
+        return cls(
+            availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability),
+            spot_bid_price_percent=d.get("spot_bid_price_percent", None),
+            zone_id=d.get("zone_id", None),
+        )
 
 
 class InstancePoolAwsAttributesAvailability(Enum):
     """Availability type used for the spot nodes.
-    
-    The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability"""
 
-    ON_DEMAND = 'ON_DEMAND'
-    SPOT = 'SPOT'
+    The default value is defined by InstancePoolConf.instancePoolDefaultAwsAvailability
+    """
+
+    ON_DEMAND = "ON_DEMAND"
+    SPOT = "SPOT"
 
 
 @dataclass
@@ -5543,31 +6526,38 @@ class InstancePoolAzureAttributes:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolAzureAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability.value
-        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        if self.availability is not None:
+            body["availability"] = self.availability.value
+        if self.spot_bid_max_price is not None:
+            body["spot_bid_max_price"] = self.spot_bid_max_price
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.availability is not None: body['availability'] = self.availability
-        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        if self.availability is not None:
+            body["availability"] = self.availability
+        if self.spot_bid_max_price is not None:
+            body["spot_bid_max_price"] = self.spot_bid_max_price
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAzureAttributes:
         """Deserializes the InstancePoolAzureAttributes from a dictionary."""
-        return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability),
-                   spot_bid_max_price=d.get('spot_bid_max_price', None))
+        return cls(
+            availability=_enum(d, "availability", InstancePoolAzureAttributesAvailability),
+            spot_bid_max_price=d.get("spot_bid_max_price", None),
+        )
 
 
 class InstancePoolAzureAttributesAvailability(Enum):
     """Shows the Availability type used for the spot nodes.
-    
-    The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability"""
 
-    ON_DEMAND_AZURE = 'ON_DEMAND_AZURE'
-    SPOT_AZURE = 'SPOT_AZURE'
+    The default value is defined by InstancePoolConf.instancePoolDefaultAzureAvailability
+    """
+
+    ON_DEMAND_AZURE = "ON_DEMAND_AZURE"
+    SPOT_AZURE = "SPOT_AZURE"
 
 
 @dataclass
@@ -5600,25 +6590,33 @@ class InstancePoolGcpAttributes:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolGcpAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability.value
-        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        if self.gcp_availability is not None:
+            body["gcp_availability"] = self.gcp_availability.value
+        if self.local_ssd_count is not None:
+            body["local_ssd_count"] = self.local_ssd_count
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability
-        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
-        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        if self.gcp_availability is not None:
+            body["gcp_availability"] = self.gcp_availability
+        if self.local_ssd_count is not None:
+            body["local_ssd_count"] = self.local_ssd_count
+        if self.zone_id is not None:
+            body["zone_id"] = self.zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolGcpAttributes:
         """Deserializes the InstancePoolGcpAttributes from a dictionary."""
-        return cls(gcp_availability=_enum(d, 'gcp_availability', GcpAvailability),
-                   local_ssd_count=d.get('local_ssd_count', None),
-                   zone_id=d.get('zone_id', None))
+        return cls(
+            gcp_availability=_enum(d, "gcp_availability", GcpAvailability),
+            local_ssd_count=d.get("local_ssd_count", None),
+            zone_id=d.get("zone_id", None),
+        )
 
 
 @dataclass
@@ -5633,32 +6631,40 @@ class InstancePoolPermission:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermission:
         """Deserializes the InstancePoolPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel),
+        )
 
 
 class InstancePoolPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_ATTACH_TO = 'CAN_ATTACH_TO'
-    CAN_MANAGE = 'CAN_MANAGE'
+    CAN_ATTACH_TO = "CAN_ATTACH_TO"
+    CAN_MANAGE = "CAN_MANAGE"
 
 
 @dataclass
@@ -5673,26 +6679,32 @@ def as_dict(self) -> dict:
         """Serializes the InstancePoolPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissions:
         """Deserializes the InstancePoolPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      InstancePoolAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -5705,22 +6717,28 @@ class InstancePoolPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsDescription:
         """Deserializes the InstancePoolPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel),
+        )
 
 
 @dataclass
@@ -5734,31 +6752,35 @@ def as_dict(self) -> dict:
         """Serializes the InstancePoolPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsRequest:
         """Deserializes the InstancePoolPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      InstancePoolAccessControlRequest),
-                   instance_pool_id=d.get('instance_pool_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlRequest),
+            instance_pool_id=d.get("instance_pool_id", None),
+        )
 
 
 class InstancePoolState(Enum):
     """Current state of the instance pool."""
 
-    ACTIVE = 'ACTIVE'
-    DELETED = 'DELETED'
-    STOPPED = 'STOPPED'
+    ACTIVE = "ACTIVE"
+    DELETED = "DELETED"
+    STOPPED = "STOPPED"
 
 
 @dataclass
@@ -5778,28 +6800,38 @@ class InstancePoolStats:
     def as_dict(self) -> dict:
         """Serializes the InstancePoolStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.idle_count is not None: body['idle_count'] = self.idle_count
-        if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count
-        if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count
-        if self.used_count is not None: body['used_count'] = self.used_count
+        if self.idle_count is not None:
+            body["idle_count"] = self.idle_count
+        if self.pending_idle_count is not None:
+            body["pending_idle_count"] = self.pending_idle_count
+        if self.pending_used_count is not None:
+            body["pending_used_count"] = self.pending_used_count
+        if self.used_count is not None:
+            body["used_count"] = self.used_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.idle_count is not None: body['idle_count'] = self.idle_count
-        if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count
-        if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count
-        if self.used_count is not None: body['used_count'] = self.used_count
+        if self.idle_count is not None:
+            body["idle_count"] = self.idle_count
+        if self.pending_idle_count is not None:
+            body["pending_idle_count"] = self.pending_idle_count
+        if self.pending_used_count is not None:
+            body["pending_used_count"] = self.pending_used_count
+        if self.used_count is not None:
+            body["used_count"] = self.used_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStats:
         """Deserializes the InstancePoolStats from a dictionary."""
-        return cls(idle_count=d.get('idle_count', None),
-                   pending_idle_count=d.get('pending_idle_count', None),
-                   pending_used_count=d.get('pending_used_count', None),
-                   used_count=d.get('used_count', None))
+        return cls(
+            idle_count=d.get("idle_count", None),
+            pending_idle_count=d.get("pending_idle_count", None),
+            pending_used_count=d.get("pending_used_count", None),
+            used_count=d.get("used_count", None),
+        )
 
 
 @dataclass
@@ -5813,19 +6845,20 @@ def as_dict(self) -> dict:
         """Serializes the InstancePoolStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.pending_instance_errors:
-            body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors]
+            body["pending_instance_errors"] = [v.as_dict() for v in self.pending_instance_errors]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors
+        if self.pending_instance_errors:
+            body["pending_instance_errors"] = self.pending_instance_errors
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStatus:
         """Deserializes the InstancePoolStatus from a dictionary."""
-        return cls(pending_instance_errors=_repeated_dict(d, 'pending_instance_errors', PendingInstanceError))
+        return cls(pending_instance_errors=_repeated_dict(d, "pending_instance_errors", PendingInstanceError))
 
 
 @dataclass
@@ -5851,45 +6884,51 @@ class InstanceProfile:
     def as_dict(self) -> dict:
         """Serializes the InstanceProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.iam_role_arn is not None:
+            body["iam_role_arn"] = self.iam_role_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.is_meta_instance_profile is not None:
-            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+            body["is_meta_instance_profile"] = self.is_meta_instance_profile
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.iam_role_arn is not None:
+            body["iam_role_arn"] = self.iam_role_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.is_meta_instance_profile is not None:
-            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+            body["is_meta_instance_profile"] = self.is_meta_instance_profile
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstanceProfile:
         """Deserializes the InstanceProfile from a dictionary."""
-        return cls(iam_role_arn=d.get('iam_role_arn', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   is_meta_instance_profile=d.get('is_meta_instance_profile', None))
+        return cls(
+            iam_role_arn=d.get("iam_role_arn", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            is_meta_instance_profile=d.get("is_meta_instance_profile", None),
+        )
 
 
 class Kind(Enum):
     """The kind of compute described by this compute specification.
-    
+
     Depending on `kind`, different validations and default values will be applied.
-    
+
     The first usage of this value is for the simple cluster form where it sets `kind =
     CLASSIC_PREVIEW`."""
 
-    CLASSIC_PREVIEW = 'CLASSIC_PREVIEW'
+    CLASSIC_PREVIEW = "CLASSIC_PREVIEW"
 
 
 class Language(Enum):
 
-    PYTHON = 'python'
-    SCALA = 'scala'
-    SQL = 'sql'
+    PYTHON = "python"
+    SCALA = "scala"
+    SQL = "sql"
 
 
 @dataclass
@@ -5930,37 +6969,53 @@ class Library:
     def as_dict(self) -> dict:
         """Serializes the Library into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cran: body['cran'] = self.cran.as_dict()
-        if self.egg is not None: body['egg'] = self.egg
-        if self.jar is not None: body['jar'] = self.jar
-        if self.maven: body['maven'] = self.maven.as_dict()
-        if self.pypi: body['pypi'] = self.pypi.as_dict()
-        if self.requirements is not None: body['requirements'] = self.requirements
-        if self.whl is not None: body['whl'] = self.whl
+        if self.cran:
+            body["cran"] = self.cran.as_dict()
+        if self.egg is not None:
+            body["egg"] = self.egg
+        if self.jar is not None:
+            body["jar"] = self.jar
+        if self.maven:
+            body["maven"] = self.maven.as_dict()
+        if self.pypi:
+            body["pypi"] = self.pypi.as_dict()
+        if self.requirements is not None:
+            body["requirements"] = self.requirements
+        if self.whl is not None:
+            body["whl"] = self.whl
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Library into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cran: body['cran'] = self.cran
-        if self.egg is not None: body['egg'] = self.egg
-        if self.jar is not None: body['jar'] = self.jar
-        if self.maven: body['maven'] = self.maven
-        if self.pypi: body['pypi'] = self.pypi
-        if self.requirements is not None: body['requirements'] = self.requirements
-        if self.whl is not None: body['whl'] = self.whl
+        if self.cran:
+            body["cran"] = self.cran
+        if self.egg is not None:
+            body["egg"] = self.egg
+        if self.jar is not None:
+            body["jar"] = self.jar
+        if self.maven:
+            body["maven"] = self.maven
+        if self.pypi:
+            body["pypi"] = self.pypi
+        if self.requirements is not None:
+            body["requirements"] = self.requirements
+        if self.whl is not None:
+            body["whl"] = self.whl
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Library:
         """Deserializes the Library from a dictionary."""
-        return cls(cran=_from_dict(d, 'cran', RCranLibrary),
-                   egg=d.get('egg', None),
-                   jar=d.get('jar', None),
-                   maven=_from_dict(d, 'maven', MavenLibrary),
-                   pypi=_from_dict(d, 'pypi', PythonPyPiLibrary),
-                   requirements=d.get('requirements', None),
-                   whl=d.get('whl', None))
+        return cls(
+            cran=_from_dict(d, "cran", RCranLibrary),
+            egg=d.get("egg", None),
+            jar=d.get("jar", None),
+            maven=_from_dict(d, "maven", MavenLibrary),
+            pypi=_from_dict(d, "pypi", PythonPyPiLibrary),
+            requirements=d.get("requirements", None),
+            whl=d.get("whl", None),
+        )
 
 
 @dataclass
@@ -5983,42 +7038,50 @@ def as_dict(self) -> dict:
         """Serializes the LibraryFullStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.is_library_for_all_clusters is not None:
-            body['is_library_for_all_clusters'] = self.is_library_for_all_clusters
-        if self.library: body['library'] = self.library.as_dict()
-        if self.messages: body['messages'] = [v for v in self.messages]
-        if self.status is not None: body['status'] = self.status.value
+            body["is_library_for_all_clusters"] = self.is_library_for_all_clusters
+        if self.library:
+            body["library"] = self.library.as_dict()
+        if self.messages:
+            body["messages"] = [v for v in self.messages]
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.is_library_for_all_clusters is not None:
-            body['is_library_for_all_clusters'] = self.is_library_for_all_clusters
-        if self.library: body['library'] = self.library
-        if self.messages: body['messages'] = self.messages
-        if self.status is not None: body['status'] = self.status
+            body["is_library_for_all_clusters"] = self.is_library_for_all_clusters
+        if self.library:
+            body["library"] = self.library
+        if self.messages:
+            body["messages"] = self.messages
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LibraryFullStatus:
         """Deserializes the LibraryFullStatus from a dictionary."""
-        return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None),
-                   library=_from_dict(d, 'library', Library),
-                   messages=d.get('messages', None),
-                   status=_enum(d, 'status', LibraryInstallStatus))
+        return cls(
+            is_library_for_all_clusters=d.get("is_library_for_all_clusters", None),
+            library=_from_dict(d, "library", Library),
+            messages=d.get("messages", None),
+            status=_enum(d, "status", LibraryInstallStatus),
+        )
 
 
 class LibraryInstallStatus(Enum):
     """The status of a library on a specific cluster."""
 
-    FAILED = 'FAILED'
-    INSTALLED = 'INSTALLED'
-    INSTALLING = 'INSTALLING'
-    PENDING = 'PENDING'
-    RESOLVING = 'RESOLVING'
-    RESTORED = 'RESTORED'
-    SKIPPED = 'SKIPPED'
-    UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART'
+    FAILED = "FAILED"
+    INSTALLED = "INSTALLED"
+    INSTALLING = "INSTALLING"
+    PENDING = "PENDING"
+    RESOLVING = "RESOLVING"
+    RESTORED = "RESTORED"
+    SKIPPED = "SKIPPED"
+    UNINSTALL_ON_RESTART = "UNINSTALL_ON_RESTART"
 
 
 @dataclass
@@ -6029,19 +7092,21 @@ class ListAllClusterLibraryStatusesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAllClusterLibraryStatusesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
+        if self.statuses:
+            body["statuses"] = [v.as_dict() for v in self.statuses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.statuses: body['statuses'] = self.statuses
+        if self.statuses:
+            body["statuses"] = self.statuses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllClusterLibraryStatusesResponse:
         """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary."""
-        return cls(statuses=_repeated_dict(d, 'statuses', ClusterLibraryStatuses))
+        return cls(statuses=_repeated_dict(d, "statuses", ClusterLibraryStatuses))
 
 
 @dataclass
@@ -6055,21 +7120,28 @@ class ListAvailableZonesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAvailableZonesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_zone is not None: body['default_zone'] = self.default_zone
-        if self.zones: body['zones'] = [v for v in self.zones]
+        if self.default_zone is not None:
+            body["default_zone"] = self.default_zone
+        if self.zones:
+            body["zones"] = [v for v in self.zones]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_zone is not None: body['default_zone'] = self.default_zone
-        if self.zones: body['zones'] = self.zones
+        if self.default_zone is not None:
+            body["default_zone"] = self.default_zone
+        if self.zones:
+            body["zones"] = self.zones
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse:
         """Deserializes the ListAvailableZonesResponse from a dictionary."""
-        return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None))
+        return cls(
+            default_zone=d.get("default_zone", None),
+            zones=d.get("zones", None),
+        )
 
 
 @dataclass
@@ -6088,25 +7160,33 @@ class ListClusterCompliancesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListClusterCompliancesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clusters: body['clusters'] = self.clusters
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse:
         """Deserializes the ListClusterCompliancesResponse from a dictionary."""
-        return cls(clusters=_repeated_dict(d, 'clusters', ClusterCompliance),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None))
+        return cls(
+            clusters=_repeated_dict(d, "clusters", ClusterCompliance),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+        )
 
 
 @dataclass
@@ -6126,28 +7206,38 @@ class ListClustersFilterBy:
     def as_dict(self) -> dict:
         """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources]
-        if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states]
-        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.cluster_sources:
+            body["cluster_sources"] = [v.value for v in self.cluster_sources]
+        if self.cluster_states:
+            body["cluster_states"] = [v.value for v in self.cluster_states]
+        if self.is_pinned is not None:
+            body["is_pinned"] = self.is_pinned
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_sources: body['cluster_sources'] = self.cluster_sources
-        if self.cluster_states: body['cluster_states'] = self.cluster_states
-        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.cluster_sources:
+            body["cluster_sources"] = self.cluster_sources
+        if self.cluster_states:
+            body["cluster_states"] = self.cluster_states
+        if self.is_pinned is not None:
+            body["is_pinned"] = self.is_pinned
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy:
         """Deserializes the ListClustersFilterBy from a dictionary."""
-        return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource),
-                   cluster_states=_repeated_enum(d, 'cluster_states', State),
-                   is_pinned=d.get('is_pinned', None),
-                   policy_id=d.get('policy_id', None))
+        return cls(
+            cluster_sources=_repeated_enum(d, "cluster_sources", ClusterSource),
+            cluster_states=_repeated_enum(d, "cluster_states", State),
+            is_pinned=d.get("is_pinned", None),
+            policy_id=d.get("policy_id", None),
+        )
 
 
 @dataclass
@@ -6166,25 +7256,33 @@ class ListClustersResponse:
     def as_dict(self) -> dict:
         """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clusters: body['clusters'] = self.clusters
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse:
         """Deserializes the ListClustersResponse from a dictionary."""
-        return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None))
+        return cls(
+            clusters=_repeated_dict(d, "clusters", ClusterDetails),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+        )
 
 
 @dataclass
@@ -6199,37 +7297,43 @@ class ListClustersSortBy:
     def as_dict(self) -> dict:
         """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.direction is not None: body['direction'] = self.direction.value
-        if self.field is not None: body['field'] = self.field.value
+        if self.direction is not None:
+            body["direction"] = self.direction.value
+        if self.field is not None:
+            body["field"] = self.field.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersSortBy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.direction is not None: body['direction'] = self.direction
-        if self.field is not None: body['field'] = self.field
+        if self.direction is not None:
+            body["direction"] = self.direction
+        if self.field is not None:
+            body["field"] = self.field
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy:
         """Deserializes the ListClustersSortBy from a dictionary."""
-        return cls(direction=_enum(d, 'direction', ListClustersSortByDirection),
-                   field=_enum(d, 'field', ListClustersSortByField))
+        return cls(
+            direction=_enum(d, "direction", ListClustersSortByDirection),
+            field=_enum(d, "field", ListClustersSortByField),
+        )
 
 
 class ListClustersSortByDirection(Enum):
     """The direction to sort by."""
 
-    ASC = 'ASC'
-    DESC = 'DESC'
+    ASC = "ASC"
+    DESC = "DESC"
 
 
 class ListClustersSortByField(Enum):
     """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest
     precedence: cluster state, pinned or unpinned, then cluster name."""
 
-    CLUSTER_NAME = 'CLUSTER_NAME'
-    DEFAULT = 'DEFAULT'
+    CLUSTER_NAME = "CLUSTER_NAME"
+    DEFAULT = "DEFAULT"
 
 
 @dataclass
@@ -6239,19 +7343,21 @@ class ListGlobalInitScriptsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListGlobalInitScriptsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts]
+        if self.scripts:
+            body["scripts"] = [v.as_dict() for v in self.scripts]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scripts: body['scripts'] = self.scripts
+        if self.scripts:
+            body["scripts"] = self.scripts
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGlobalInitScriptsResponse:
         """Deserializes the ListGlobalInitScriptsResponse from a dictionary."""
-        return cls(scripts=_repeated_dict(d, 'scripts', GlobalInitScriptDetails))
+        return cls(scripts=_repeated_dict(d, "scripts", GlobalInitScriptDetails))
 
 
 @dataclass
@@ -6261,19 +7367,21 @@ class ListInstancePools:
     def as_dict(self) -> dict:
         """Serializes the ListInstancePools into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools]
+        if self.instance_pools:
+            body["instance_pools"] = [v.as_dict() for v in self.instance_pools]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pools: body['instance_pools'] = self.instance_pools
+        if self.instance_pools:
+            body["instance_pools"] = self.instance_pools
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstancePools:
         """Deserializes the ListInstancePools from a dictionary."""
-        return cls(instance_pools=_repeated_dict(d, 'instance_pools', InstancePoolAndStats))
+        return cls(instance_pools=_repeated_dict(d, "instance_pools", InstancePoolAndStats))
 
 
 @dataclass
@@ -6284,19 +7392,21 @@ class ListInstanceProfilesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListInstanceProfilesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles]
+        if self.instance_profiles:
+            body["instance_profiles"] = [v.as_dict() for v in self.instance_profiles]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_profiles: body['instance_profiles'] = self.instance_profiles
+        if self.instance_profiles:
+            body["instance_profiles"] = self.instance_profiles
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstanceProfilesResponse:
         """Deserializes the ListInstanceProfilesResponse from a dictionary."""
-        return cls(instance_profiles=_repeated_dict(d, 'instance_profiles', InstanceProfile))
+        return cls(instance_profiles=_repeated_dict(d, "instance_profiles", InstanceProfile))
 
 
 @dataclass
@@ -6307,19 +7417,21 @@ class ListNodeTypesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListNodeTypesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types]
+        if self.node_types:
+            body["node_types"] = [v.as_dict() for v in self.node_types]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.node_types: body['node_types'] = self.node_types
+        if self.node_types:
+            body["node_types"] = self.node_types
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNodeTypesResponse:
         """Deserializes the ListNodeTypesResponse from a dictionary."""
-        return cls(node_types=_repeated_dict(d, 'node_types', NodeType))
+        return cls(node_types=_repeated_dict(d, "node_types", NodeType))
 
 
 @dataclass
@@ -6330,19 +7442,21 @@ class ListPoliciesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        if self.policies:
+            body["policies"] = [v.as_dict() for v in self.policies]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policies: body['policies'] = self.policies
+        if self.policies:
+            body["policies"] = self.policies
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse:
         """Deserializes the ListPoliciesResponse from a dictionary."""
-        return cls(policies=_repeated_dict(d, 'policies', Policy))
+        return cls(policies=_repeated_dict(d, "policies", Policy))
 
 
 @dataclass
@@ -6357,35 +7471,41 @@ class ListPolicyFamiliesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policy_families:
+            body["policy_families"] = [v.as_dict() for v in self.policy_families]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policy_families: body['policy_families'] = self.policy_families
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policy_families:
+            body["policy_families"] = self.policy_families
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPolicyFamiliesResponse:
         """Deserializes the ListPolicyFamiliesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   policy_families=_repeated_dict(d, 'policy_families', PolicyFamily))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            policy_families=_repeated_dict(d, "policy_families", PolicyFamily),
+        )
 
 
 class ListSortColumn(Enum):
 
-    POLICY_CREATION_TIME = 'POLICY_CREATION_TIME'
-    POLICY_NAME = 'POLICY_NAME'
+    POLICY_CREATION_TIME = "POLICY_CREATION_TIME"
+    POLICY_NAME = "POLICY_NAME"
 
 
 class ListSortOrder(Enum):
     """A generic ordering enum for list-based queries."""
 
-    ASC = 'ASC'
-    DESC = 'DESC'
+    ASC = "ASC"
+    DESC = "DESC"
 
 
 @dataclass
@@ -6396,19 +7516,21 @@ class LocalFileInfo:
     def as_dict(self) -> dict:
         """Serializes the LocalFileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LocalFileInfo:
         """Deserializes the LocalFileInfo from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 @dataclass
@@ -6423,25 +7545,27 @@ def as_dict(self) -> dict:
         """Serializes the LogAnalyticsInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.log_analytics_primary_key is not None:
-            body['log_analytics_primary_key'] = self.log_analytics_primary_key
+            body["log_analytics_primary_key"] = self.log_analytics_primary_key
         if self.log_analytics_workspace_id is not None:
-            body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
+            body["log_analytics_workspace_id"] = self.log_analytics_workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.log_analytics_primary_key is not None:
-            body['log_analytics_primary_key'] = self.log_analytics_primary_key
+            body["log_analytics_primary_key"] = self.log_analytics_primary_key
         if self.log_analytics_workspace_id is not None:
-            body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
+            body["log_analytics_workspace_id"] = self.log_analytics_workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogAnalyticsInfo:
         """Deserializes the LogAnalyticsInfo from a dictionary."""
-        return cls(log_analytics_primary_key=d.get('log_analytics_primary_key', None),
-                   log_analytics_workspace_id=d.get('log_analytics_workspace_id', None))
+        return cls(
+            log_analytics_primary_key=d.get("log_analytics_primary_key", None),
+            log_analytics_workspace_id=d.get("log_analytics_workspace_id", None),
+        )
 
 
 @dataclass
@@ -6457,21 +7581,28 @@ class LogSyncStatus:
     def as_dict(self) -> dict:
         """Serializes the LogSyncStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
-        if self.last_exception is not None: body['last_exception'] = self.last_exception
+        if self.last_attempted is not None:
+            body["last_attempted"] = self.last_attempted
+        if self.last_exception is not None:
+            body["last_exception"] = self.last_exception
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
-        if self.last_exception is not None: body['last_exception'] = self.last_exception
+        if self.last_attempted is not None:
+            body["last_attempted"] = self.last_attempted
+        if self.last_exception is not None:
+            body["last_exception"] = self.last_exception
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogSyncStatus:
         """Deserializes the LogSyncStatus from a dictionary."""
-        return cls(last_attempted=d.get('last_attempted', None), last_exception=d.get('last_exception', None))
+        return cls(
+            last_attempted=d.get("last_attempted", None),
+            last_exception=d.get("last_exception", None),
+        )
 
 
 @dataclass
@@ -6492,25 +7623,33 @@ class MavenLibrary:
     def as_dict(self) -> dict:
         """Serializes the MavenLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.coordinates is not None: body['coordinates'] = self.coordinates
-        if self.exclusions: body['exclusions'] = [v for v in self.exclusions]
-        if self.repo is not None: body['repo'] = self.repo
+        if self.coordinates is not None:
+            body["coordinates"] = self.coordinates
+        if self.exclusions:
+            body["exclusions"] = [v for v in self.exclusions]
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.coordinates is not None: body['coordinates'] = self.coordinates
-        if self.exclusions: body['exclusions'] = self.exclusions
-        if self.repo is not None: body['repo'] = self.repo
+        if self.coordinates is not None:
+            body["coordinates"] = self.coordinates
+        if self.exclusions:
+            body["exclusions"] = self.exclusions
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MavenLibrary:
         """Deserializes the MavenLibrary from a dictionary."""
-        return cls(coordinates=d.get('coordinates', None),
-                   exclusions=d.get('exclusions', None),
-                   repo=d.get('repo', None))
+        return cls(
+            coordinates=d.get("coordinates", None),
+            exclusions=d.get("exclusions", None),
+            repo=d.get("repo", None),
+        )
 
 
 @dataclass
@@ -6528,33 +7667,43 @@ class NodeInstanceType:
     def as_dict(self) -> dict:
         """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
-        if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb
-        if self.local_disks is not None: body['local_disks'] = self.local_disks
+        if self.instance_type_id is not None:
+            body["instance_type_id"] = self.instance_type_id
+        if self.local_disk_size_gb is not None:
+            body["local_disk_size_gb"] = self.local_disk_size_gb
+        if self.local_disks is not None:
+            body["local_disks"] = self.local_disks
         if self.local_nvme_disk_size_gb is not None:
-            body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb
-        if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks
+            body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb
+        if self.local_nvme_disks is not None:
+            body["local_nvme_disks"] = self.local_nvme_disks
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
-        if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb
-        if self.local_disks is not None: body['local_disks'] = self.local_disks
+        if self.instance_type_id is not None:
+            body["instance_type_id"] = self.instance_type_id
+        if self.local_disk_size_gb is not None:
+            body["local_disk_size_gb"] = self.local_disk_size_gb
+        if self.local_disks is not None:
+            body["local_disks"] = self.local_disks
         if self.local_nvme_disk_size_gb is not None:
-            body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb
-        if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks
+            body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb
+        if self.local_nvme_disks is not None:
+            body["local_nvme_disks"] = self.local_nvme_disks
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NodeInstanceType:
         """Deserializes the NodeInstanceType from a dictionary."""
-        return cls(instance_type_id=d.get('instance_type_id', None),
-                   local_disk_size_gb=d.get('local_disk_size_gb', None),
-                   local_disks=d.get('local_disks', None),
-                   local_nvme_disk_size_gb=d.get('local_nvme_disk_size_gb', None),
-                   local_nvme_disks=d.get('local_nvme_disks', None))
+        return cls(
+            instance_type_id=d.get("instance_type_id", None),
+            local_disk_size_gb=d.get("local_disk_size_gb", None),
+            local_disks=d.get("local_disks", None),
+            local_nvme_disk_size_gb=d.get("local_nvme_disk_size_gb", None),
+            local_nvme_disks=d.get("local_nvme_disks", None),
+        )
 
 
 @dataclass
@@ -6616,83 +7765,123 @@ class NodeType:
     def as_dict(self) -> dict:
         """Serializes the NodeType into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.category is not None: body['category'] = self.category
-        if self.description is not None: body['description'] = self.description
-        if self.display_order is not None: body['display_order'] = self.display_order
-        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
-        if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated
+        if self.category is not None:
+            body["category"] = self.category
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_order is not None:
+            body["display_order"] = self.display_order
+        if self.instance_type_id is not None:
+            body["instance_type_id"] = self.instance_type_id
+        if self.is_deprecated is not None:
+            body["is_deprecated"] = self.is_deprecated
         if self.is_encrypted_in_transit is not None:
-            body['is_encrypted_in_transit'] = self.is_encrypted_in_transit
-        if self.is_graviton is not None: body['is_graviton'] = self.is_graviton
-        if self.is_hidden is not None: body['is_hidden'] = self.is_hidden
-        if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled
-        if self.memory_mb is not None: body['memory_mb'] = self.memory_mb
-        if self.node_info: body['node_info'] = self.node_info.as_dict()
-        if self.node_instance_type: body['node_instance_type'] = self.node_instance_type.as_dict()
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_cores is not None: body['num_cores'] = self.num_cores
-        if self.num_gpus is not None: body['num_gpus'] = self.num_gpus
-        if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable
-        if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable
-        if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags
-        if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes
+            body["is_encrypted_in_transit"] = self.is_encrypted_in_transit
+        if self.is_graviton is not None:
+            body["is_graviton"] = self.is_graviton
+        if self.is_hidden is not None:
+            body["is_hidden"] = self.is_hidden
+        if self.is_io_cache_enabled is not None:
+            body["is_io_cache_enabled"] = self.is_io_cache_enabled
+        if self.memory_mb is not None:
+            body["memory_mb"] = self.memory_mb
+        if self.node_info:
+            body["node_info"] = self.node_info.as_dict()
+        if self.node_instance_type:
+            body["node_instance_type"] = self.node_instance_type.as_dict()
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_cores is not None:
+            body["num_cores"] = self.num_cores
+        if self.num_gpus is not None:
+            body["num_gpus"] = self.num_gpus
+        if self.photon_driver_capable is not None:
+            body["photon_driver_capable"] = self.photon_driver_capable
+        if self.photon_worker_capable is not None:
+            body["photon_worker_capable"] = self.photon_worker_capable
+        if self.support_cluster_tags is not None:
+            body["support_cluster_tags"] = self.support_cluster_tags
+        if self.support_ebs_volumes is not None:
+            body["support_ebs_volumes"] = self.support_ebs_volumes
         if self.support_port_forwarding is not None:
-            body['support_port_forwarding'] = self.support_port_forwarding
-        if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
+            body["support_port_forwarding"] = self.support_port_forwarding
+        if self.supports_elastic_disk is not None:
+            body["supports_elastic_disk"] = self.supports_elastic_disk
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NodeType into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.category is not None: body['category'] = self.category
-        if self.description is not None: body['description'] = self.description
-        if self.display_order is not None: body['display_order'] = self.display_order
-        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
-        if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated
+        if self.category is not None:
+            body["category"] = self.category
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_order is not None:
+            body["display_order"] = self.display_order
+        if self.instance_type_id is not None:
+            body["instance_type_id"] = self.instance_type_id
+        if self.is_deprecated is not None:
+            body["is_deprecated"] = self.is_deprecated
         if self.is_encrypted_in_transit is not None:
-            body['is_encrypted_in_transit'] = self.is_encrypted_in_transit
-        if self.is_graviton is not None: body['is_graviton'] = self.is_graviton
-        if self.is_hidden is not None: body['is_hidden'] = self.is_hidden
-        if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled
-        if self.memory_mb is not None: body['memory_mb'] = self.memory_mb
-        if self.node_info: body['node_info'] = self.node_info
-        if self.node_instance_type: body['node_instance_type'] = self.node_instance_type
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_cores is not None: body['num_cores'] = self.num_cores
-        if self.num_gpus is not None: body['num_gpus'] = self.num_gpus
-        if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable
-        if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable
-        if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags
-        if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes
+            body["is_encrypted_in_transit"] = self.is_encrypted_in_transit
+        if self.is_graviton is not None:
+            body["is_graviton"] = self.is_graviton
+        if self.is_hidden is not None:
+            body["is_hidden"] = self.is_hidden
+        if self.is_io_cache_enabled is not None:
+            body["is_io_cache_enabled"] = self.is_io_cache_enabled
+        if self.memory_mb is not None:
+            body["memory_mb"] = self.memory_mb
+        if self.node_info:
+            body["node_info"] = self.node_info
+        if self.node_instance_type:
+            body["node_instance_type"] = self.node_instance_type
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_cores is not None:
+            body["num_cores"] = self.num_cores
+        if self.num_gpus is not None:
+            body["num_gpus"] = self.num_gpus
+        if self.photon_driver_capable is not None:
+            body["photon_driver_capable"] = self.photon_driver_capable
+        if self.photon_worker_capable is not None:
+            body["photon_worker_capable"] = self.photon_worker_capable
+        if self.support_cluster_tags is not None:
+            body["support_cluster_tags"] = self.support_cluster_tags
+        if self.support_ebs_volumes is not None:
+            body["support_ebs_volumes"] = self.support_ebs_volumes
         if self.support_port_forwarding is not None:
-            body['support_port_forwarding'] = self.support_port_forwarding
-        if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
+            body["support_port_forwarding"] = self.support_port_forwarding
+        if self.supports_elastic_disk is not None:
+            body["supports_elastic_disk"] = self.supports_elastic_disk
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NodeType:
         """Deserializes the NodeType from a dictionary."""
-        return cls(category=d.get('category', None),
-                   description=d.get('description', None),
-                   display_order=d.get('display_order', None),
-                   instance_type_id=d.get('instance_type_id', None),
-                   is_deprecated=d.get('is_deprecated', None),
-                   is_encrypted_in_transit=d.get('is_encrypted_in_transit', None),
-                   is_graviton=d.get('is_graviton', None),
-                   is_hidden=d.get('is_hidden', None),
-                   is_io_cache_enabled=d.get('is_io_cache_enabled', None),
-                   memory_mb=d.get('memory_mb', None),
-                   node_info=_from_dict(d, 'node_info', CloudProviderNodeInfo),
-                   node_instance_type=_from_dict(d, 'node_instance_type', NodeInstanceType),
-                   node_type_id=d.get('node_type_id', None),
-                   num_cores=d.get('num_cores', None),
-                   num_gpus=d.get('num_gpus', None),
-                   photon_driver_capable=d.get('photon_driver_capable', None),
-                   photon_worker_capable=d.get('photon_worker_capable', None),
-                   support_cluster_tags=d.get('support_cluster_tags', None),
-                   support_ebs_volumes=d.get('support_ebs_volumes', None),
-                   support_port_forwarding=d.get('support_port_forwarding', None),
-                   supports_elastic_disk=d.get('supports_elastic_disk', None))
+        return cls(
+            category=d.get("category", None),
+            description=d.get("description", None),
+            display_order=d.get("display_order", None),
+            instance_type_id=d.get("instance_type_id", None),
+            is_deprecated=d.get("is_deprecated", None),
+            is_encrypted_in_transit=d.get("is_encrypted_in_transit", None),
+            is_graviton=d.get("is_graviton", None),
+            is_hidden=d.get("is_hidden", None),
+            is_io_cache_enabled=d.get("is_io_cache_enabled", None),
+            memory_mb=d.get("memory_mb", None),
+            node_info=_from_dict(d, "node_info", CloudProviderNodeInfo),
+            node_instance_type=_from_dict(d, "node_instance_type", NodeInstanceType),
+            node_type_id=d.get("node_type_id", None),
+            num_cores=d.get("num_cores", None),
+            num_gpus=d.get("num_gpus", None),
+            photon_driver_capable=d.get("photon_driver_capable", None),
+            photon_worker_capable=d.get("photon_worker_capable", None),
+            support_cluster_tags=d.get("support_cluster_tags", None),
+            support_ebs_volumes=d.get("support_ebs_volumes", None),
+            support_port_forwarding=d.get("support_port_forwarding", None),
+            supports_elastic_disk=d.get("supports_elastic_disk", None),
+        )
 
 
 @dataclass
@@ -6704,21 +7893,28 @@ class PendingInstanceError:
     def as_dict(self) -> dict:
         """Serializes the PendingInstanceError into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.message is not None: body['message'] = self.message
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.message is not None: body['message'] = self.message
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PendingInstanceError:
         """Deserializes the PendingInstanceError from a dictionary."""
-        return cls(instance_id=d.get('instance_id', None), message=d.get('message', None))
+        return cls(
+            instance_id=d.get("instance_id", None),
+            message=d.get("message", None),
+        )
 
 
 @dataclass
@@ -6729,19 +7925,21 @@ class PermanentDeleteCluster:
     def as_dict(self) -> dict:
         """Serializes the PermanentDeleteCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteCluster:
         """Deserializes the PermanentDeleteCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -6771,19 +7969,21 @@ class PinCluster:
     def as_dict(self) -> dict:
         """Serializes the PinCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PinCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PinCluster:
         """Deserializes the PinCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -6862,51 +8062,73 @@ class Policy:
     def as_dict(self) -> dict:
         """Serializes the Policy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.is_default is not None: body['is_default'] = self.is_default
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.created_at_timestamp is not None:
+            body["created_at_timestamp"] = self.created_at_timestamp
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.is_default is not None:
+            body["is_default"] = self.is_default
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Policy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.is_default is not None: body['is_default'] = self.is_default
-        if self.libraries: body['libraries'] = self.libraries
-        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
-        if self.name is not None: body['name'] = self.name
+        if self.created_at_timestamp is not None:
+            body["created_at_timestamp"] = self.created_at_timestamp
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.is_default is not None:
+            body["is_default"] = self.is_default
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = self.max_clusters_per_user
+        if self.name is not None:
+            body["name"] = self.name
         if self.policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
+            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Policy:
         """Deserializes the Policy from a dictionary."""
-        return cls(created_at_timestamp=d.get('created_at_timestamp', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   is_default=d.get('is_default', None),
-                   libraries=_repeated_dict(d, 'libraries', Library),
-                   max_clusters_per_user=d.get('max_clusters_per_user', None),
-                   name=d.get('name', None),
-                   policy_family_definition_overrides=d.get('policy_family_definition_overrides', None),
-                   policy_family_id=d.get('policy_family_id', None),
-                   policy_id=d.get('policy_id', None))
+        return cls(
+            created_at_timestamp=d.get("created_at_timestamp", None),
+            creator_user_name=d.get("creator_user_name", None),
+            definition=d.get("definition", None),
+            description=d.get("description", None),
+            is_default=d.get("is_default", None),
+            libraries=_repeated_dict(d, "libraries", Library),
+            max_clusters_per_user=d.get("max_clusters_per_user", None),
+            name=d.get("name", None),
+            policy_family_definition_overrides=d.get("policy_family_definition_overrides", None),
+            policy_family_id=d.get("policy_family_id", None),
+            policy_id=d.get("policy_id", None),
+        )
 
 
 @dataclass
@@ -6928,28 +8150,38 @@ class PolicyFamily:
     def as_dict(self) -> dict:
         """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None: body['definition'] = self.definition
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.definition is not None:
+            body["definition"] = self.definition
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.policy_family_id is not None:
+            body["policy_family_id"] = self.policy_family_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PolicyFamily:
         """Deserializes the PolicyFamily from a dictionary."""
-        return cls(definition=d.get('definition', None),
-                   description=d.get('description', None),
-                   name=d.get('name', None),
-                   policy_family_id=d.get('policy_family_id', None))
+        return cls(
+            definition=d.get("definition", None),
+            description=d.get("description", None),
+            name=d.get("name", None),
+            policy_family_id=d.get("policy_family_id", None),
+        )
 
 
 @dataclass
@@ -6964,21 +8196,25 @@ class PythonPyPiLibrary:
     def as_dict(self) -> dict:
         """Serializes the PythonPyPiLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.package is not None: body['package'] = self.package
-        if self.repo is not None: body['repo'] = self.repo
+        if self.package is not None:
+            body["package"] = self.package
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.package is not None: body['package'] = self.package
-        if self.repo is not None: body['repo'] = self.repo
+        if self.package is not None:
+            body["package"] = self.package
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonPyPiLibrary:
         """Deserializes the PythonPyPiLibrary from a dictionary."""
-        return cls(package=d.get('package', None), repo=d.get('repo', None))
+        return cls(package=d.get("package", None), repo=d.get("repo", None))
 
 
 @dataclass
@@ -6992,21 +8228,25 @@ class RCranLibrary:
     def as_dict(self) -> dict:
         """Serializes the RCranLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.package is not None: body['package'] = self.package
-        if self.repo is not None: body['repo'] = self.repo
+        if self.package is not None:
+            body["package"] = self.package
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.package is not None: body['package'] = self.package
-        if self.repo is not None: body['repo'] = self.repo
+        if self.package is not None:
+            body["package"] = self.package
+        if self.repo is not None:
+            body["repo"] = self.repo
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RCranLibrary:
         """Deserializes the RCranLibrary from a dictionary."""
-        return cls(package=d.get('package', None), repo=d.get('repo', None))
+        return cls(package=d.get("package", None), repo=d.get("repo", None))
 
 
 @dataclass
@@ -7017,19 +8257,21 @@ class RemoveInstanceProfile:
     def as_dict(self) -> dict:
         """Serializes the RemoveInstanceProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveInstanceProfile:
         """Deserializes the RemoveInstanceProfile from a dictionary."""
-        return cls(instance_profile_arn=d.get('instance_profile_arn', None))
+        return cls(instance_profile_arn=d.get("instance_profile_arn", None))
 
 
 @dataclass
@@ -7073,25 +8315,33 @@ class ResizeCluster:
     def as_dict(self) -> dict:
         """Serializes the ResizeCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResizeCluster:
         """Deserializes the ResizeCluster from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   cluster_id=d.get('cluster_id', None),
-                   num_workers=d.get('num_workers', None))
+        return cls(
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            cluster_id=d.get("cluster_id", None),
+            num_workers=d.get("num_workers", None),
+        )
 
 
 @dataclass
@@ -7124,21 +8374,28 @@ class RestartCluster:
     def as_dict(self) -> dict:
         """Serializes the RestartCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.restart_user is not None: body['restart_user'] = self.restart_user
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.restart_user is not None:
+            body["restart_user"] = self.restart_user
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestartCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.restart_user is not None: body['restart_user'] = self.restart_user
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.restart_user is not None:
+            body["restart_user"] = self.restart_user
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartCluster:
         """Deserializes the RestartCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), restart_user=d.get('restart_user', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            restart_user=d.get("restart_user", None),
+        )
 
 
 @dataclass
@@ -7162,11 +8419,11 @@ def from_dict(cls, d: Dict[str, any]) -> RestartClusterResponse:
 
 class ResultType(Enum):
 
-    ERROR = 'error'
-    IMAGE = 'image'
-    IMAGES = 'images'
-    TABLE = 'table'
-    TEXT = 'text'
+    ERROR = "error"
+    IMAGE = "image"
+    IMAGES = "images"
+    TABLE = "table"
+    TEXT = "text"
 
 
 @dataclass
@@ -7201,60 +8458,82 @@ class Results:
     def as_dict(self) -> dict:
         """Serializes the Results into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.data: body['data'] = self.data
-        if self.file_name is not None: body['fileName'] = self.file_name
-        if self.file_names: body['fileNames'] = [v for v in self.file_names]
-        if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema
-        if self.pos is not None: body['pos'] = self.pos
-        if self.result_type is not None: body['resultType'] = self.result_type.value
-        if self.schema: body['schema'] = [v for v in self.schema]
-        if self.summary is not None: body['summary'] = self.summary
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.data:
+            body["data"] = self.data
+        if self.file_name is not None:
+            body["fileName"] = self.file_name
+        if self.file_names:
+            body["fileNames"] = [v for v in self.file_names]
+        if self.is_json_schema is not None:
+            body["isJsonSchema"] = self.is_json_schema
+        if self.pos is not None:
+            body["pos"] = self.pos
+        if self.result_type is not None:
+            body["resultType"] = self.result_type.value
+        if self.schema:
+            body["schema"] = [v for v in self.schema]
+        if self.summary is not None:
+            body["summary"] = self.summary
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Results into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.data: body['data'] = self.data
-        if self.file_name is not None: body['fileName'] = self.file_name
-        if self.file_names: body['fileNames'] = self.file_names
-        if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema
-        if self.pos is not None: body['pos'] = self.pos
-        if self.result_type is not None: body['resultType'] = self.result_type
-        if self.schema: body['schema'] = self.schema
-        if self.summary is not None: body['summary'] = self.summary
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.data:
+            body["data"] = self.data
+        if self.file_name is not None:
+            body["fileName"] = self.file_name
+        if self.file_names:
+            body["fileNames"] = self.file_names
+        if self.is_json_schema is not None:
+            body["isJsonSchema"] = self.is_json_schema
+        if self.pos is not None:
+            body["pos"] = self.pos
+        if self.result_type is not None:
+            body["resultType"] = self.result_type
+        if self.schema:
+            body["schema"] = self.schema
+        if self.summary is not None:
+            body["summary"] = self.summary
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Results:
         """Deserializes the Results from a dictionary."""
-        return cls(cause=d.get('cause', None),
-                   data=d.get('data', None),
-                   file_name=d.get('fileName', None),
-                   file_names=d.get('fileNames', None),
-                   is_json_schema=d.get('isJsonSchema', None),
-                   pos=d.get('pos', None),
-                   result_type=_enum(d, 'resultType', ResultType),
-                   schema=d.get('schema', None),
-                   summary=d.get('summary', None),
-                   truncated=d.get('truncated', None))
+        return cls(
+            cause=d.get("cause", None),
+            data=d.get("data", None),
+            file_name=d.get("fileName", None),
+            file_names=d.get("fileNames", None),
+            is_json_schema=d.get("isJsonSchema", None),
+            pos=d.get("pos", None),
+            result_type=_enum(d, "resultType", ResultType),
+            schema=d.get("schema", None),
+            summary=d.get("summary", None),
+            truncated=d.get("truncated", None),
+        )
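# Hedged editorial sketch (not part of this patch): note the wire casing in the helpers
# above — the JSON keys are camelCase ("fileName", "truncated", "resultType") while the
# Python attributes stay snake_case; "resultType" is mapped onto ResultType via _enum().
# Values below are illustrative only.
from databricks.sdk.service.compute import Results

r = Results.from_dict({"fileName": "stdout.txt", "truncated": True})
assert r.file_name == "stdout.txt"
assert r.as_dict() == {"fileName": "stdout.txt", "truncated": True}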
 
 
 class RuntimeEngine(Enum):
     """Determines the cluster's runtime engine, either standard or Photon.
-    
+
     This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
     `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-    
+
     If left unspecified, the runtime engine defaults to standard unless the spark_version contains
     -photon-, in which case Photon will be used."""
 
-    NULL = 'NULL'
-    PHOTON = 'PHOTON'
-    STANDARD = 'STANDARD'
+    NULL = "NULL"
+    PHOTON = "PHOTON"
+    STANDARD = "STANDARD"
 
 
 @dataclass
@@ -7295,37 +8574,53 @@ class S3StorageInfo:
     def as_dict(self) -> dict:
         """Serializes the S3StorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.canned_acl is not None: body['canned_acl'] = self.canned_acl
-        if self.destination is not None: body['destination'] = self.destination
-        if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption
-        if self.encryption_type is not None: body['encryption_type'] = self.encryption_type
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.kms_key is not None: body['kms_key'] = self.kms_key
-        if self.region is not None: body['region'] = self.region
+        if self.canned_acl is not None:
+            body["canned_acl"] = self.canned_acl
+        if self.destination is not None:
+            body["destination"] = self.destination
+        if self.enable_encryption is not None:
+            body["enable_encryption"] = self.enable_encryption
+        if self.encryption_type is not None:
+            body["encryption_type"] = self.encryption_type
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.kms_key is not None:
+            body["kms_key"] = self.kms_key
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.canned_acl is not None: body['canned_acl'] = self.canned_acl
-        if self.destination is not None: body['destination'] = self.destination
-        if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption
-        if self.encryption_type is not None: body['encryption_type'] = self.encryption_type
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.kms_key is not None: body['kms_key'] = self.kms_key
-        if self.region is not None: body['region'] = self.region
+        if self.canned_acl is not None:
+            body["canned_acl"] = self.canned_acl
+        if self.destination is not None:
+            body["destination"] = self.destination
+        if self.enable_encryption is not None:
+            body["enable_encryption"] = self.enable_encryption
+        if self.encryption_type is not None:
+            body["encryption_type"] = self.encryption_type
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.kms_key is not None:
+            body["kms_key"] = self.kms_key
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> S3StorageInfo:
         """Deserializes the S3StorageInfo from a dictionary."""
-        return cls(canned_acl=d.get('canned_acl', None),
-                   destination=d.get('destination', None),
-                   enable_encryption=d.get('enable_encryption', None),
-                   encryption_type=d.get('encryption_type', None),
-                   endpoint=d.get('endpoint', None),
-                   kms_key=d.get('kms_key', None),
-                   region=d.get('region', None))
+        return cls(
+            canned_acl=d.get("canned_acl", None),
+            destination=d.get("destination", None),
+            enable_encryption=d.get("enable_encryption", None),
+            encryption_type=d.get("encryption_type", None),
+            endpoint=d.get("endpoint", None),
+            kms_key=d.get("kms_key", None),
+            region=d.get("region", None),
+        )
 
 
 @dataclass
@@ -7363,37 +8658,53 @@ class SparkNode:
     def as_dict(self) -> dict:
         """Serializes the SparkNode into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes.as_dict()
-        if self.node_id is not None: body['node_id'] = self.node_id
-        if self.private_ip is not None: body['private_ip'] = self.private_ip
-        if self.public_dns is not None: body['public_dns'] = self.public_dns
-        if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
+        if self.host_private_ip is not None:
+            body["host_private_ip"] = self.host_private_ip
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.node_aws_attributes:
+            body["node_aws_attributes"] = self.node_aws_attributes.as_dict()
+        if self.node_id is not None:
+            body["node_id"] = self.node_id
+        if self.private_ip is not None:
+            body["private_ip"] = self.private_ip
+        if self.public_dns is not None:
+            body["public_dns"] = self.public_dns
+        if self.start_timestamp is not None:
+            body["start_timestamp"] = self.start_timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkNode into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip
-        if self.instance_id is not None: body['instance_id'] = self.instance_id
-        if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes
-        if self.node_id is not None: body['node_id'] = self.node_id
-        if self.private_ip is not None: body['private_ip'] = self.private_ip
-        if self.public_dns is not None: body['public_dns'] = self.public_dns
-        if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
+        if self.host_private_ip is not None:
+            body["host_private_ip"] = self.host_private_ip
+        if self.instance_id is not None:
+            body["instance_id"] = self.instance_id
+        if self.node_aws_attributes:
+            body["node_aws_attributes"] = self.node_aws_attributes
+        if self.node_id is not None:
+            body["node_id"] = self.node_id
+        if self.private_ip is not None:
+            body["private_ip"] = self.private_ip
+        if self.public_dns is not None:
+            body["public_dns"] = self.public_dns
+        if self.start_timestamp is not None:
+            body["start_timestamp"] = self.start_timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNode:
         """Deserializes the SparkNode from a dictionary."""
-        return cls(host_private_ip=d.get('host_private_ip', None),
-                   instance_id=d.get('instance_id', None),
-                   node_aws_attributes=_from_dict(d, 'node_aws_attributes', SparkNodeAwsAttributes),
-                   node_id=d.get('node_id', None),
-                   private_ip=d.get('private_ip', None),
-                   public_dns=d.get('public_dns', None),
-                   start_timestamp=d.get('start_timestamp', None))
+        return cls(
+            host_private_ip=d.get("host_private_ip", None),
+            instance_id=d.get("instance_id", None),
+            node_aws_attributes=_from_dict(d, "node_aws_attributes", SparkNodeAwsAttributes),
+            node_id=d.get("node_id", None),
+            private_ip=d.get("private_ip", None),
+            public_dns=d.get("public_dns", None),
+            start_timestamp=d.get("start_timestamp", None),
+        )
 
 
 @dataclass
@@ -7404,19 +8715,21 @@ class SparkNodeAwsAttributes:
     def as_dict(self) -> dict:
         """Serializes the SparkNodeAwsAttributes into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_spot is not None: body['is_spot'] = self.is_spot
+        if self.is_spot is not None:
+            body["is_spot"] = self.is_spot
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_spot is not None: body['is_spot'] = self.is_spot
+        if self.is_spot is not None:
+            body["is_spot"] = self.is_spot
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNodeAwsAttributes:
         """Deserializes the SparkNodeAwsAttributes from a dictionary."""
-        return cls(is_spot=d.get('is_spot', None))
+        return cls(is_spot=d.get("is_spot", None))
 
 
 @dataclass
@@ -7433,21 +8746,25 @@ class SparkVersion:
     def as_dict(self) -> dict:
         """Serializes the SparkVersion into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkVersion into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkVersion:
         """Deserializes the SparkVersion from a dictionary."""
-        return cls(key=d.get('key', None), name=d.get('name', None))
+        return cls(key=d.get("key", None), name=d.get("name", None))
 
 
 @dataclass
@@ -7458,19 +8775,21 @@ class StartCluster:
     def as_dict(self) -> dict:
         """Serializes the StartCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StartCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartCluster:
         """Deserializes the StartCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -7495,14 +8814,14 @@ def from_dict(cls, d: Dict[str, any]) -> StartClusterResponse:
 class State(Enum):
     """Current state of the cluster."""
 
-    ERROR = 'ERROR'
-    PENDING = 'PENDING'
-    RESIZING = 'RESIZING'
-    RESTARTING = 'RESTARTING'
-    RUNNING = 'RUNNING'
-    TERMINATED = 'TERMINATED'
-    TERMINATING = 'TERMINATING'
-    UNKNOWN = 'UNKNOWN'
+    ERROR = "ERROR"
+    PENDING = "PENDING"
+    RESIZING = "RESIZING"
+    RESTARTING = "RESTARTING"
+    RUNNING = "RUNNING"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
+    UNKNOWN = "UNKNOWN"
 
 
 @dataclass
@@ -7519,118 +8838,126 @@ class TerminationReason:
     def as_dict(self) -> dict:
         """Serializes the TerminationReason into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.code is not None: body['code'] = self.code.value
-        if self.parameters: body['parameters'] = self.parameters
-        if self.type is not None: body['type'] = self.type.value
+        if self.code is not None:
+            body["code"] = self.code.value
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.code is not None: body['code'] = self.code
-        if self.parameters: body['parameters'] = self.parameters
-        if self.type is not None: body['type'] = self.type
+        if self.code is not None:
+            body["code"] = self.code
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
-        return cls(code=_enum(d, 'code', TerminationReasonCode),
-                   parameters=d.get('parameters', None),
-                   type=_enum(d, 'type', TerminationReasonType))
+        return cls(
+            code=_enum(d, "code", TerminationReasonCode),
+            parameters=d.get("parameters", None),
+            type=_enum(d, "type", TerminationReasonType),
+        )
 
 
 class TerminationReasonCode(Enum):
     """status code indicating why the cluster was terminated"""
 
-    ABUSE_DETECTED = 'ABUSE_DETECTED'
-    ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE'
-    AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE'
-    AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE'
-    AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE'
-    AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE'
-    AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED'
-    AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE'
-    AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE'
-    AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE'
-    AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE'
-    AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION'
-    AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION'
-    AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING'
-    AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING'
-    AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE'
-    AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE'
-    AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE'
-    BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT'
-    BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION'
-    CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE'
-    CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE'
-    CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT'
-    CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN'
-    COMMUNICATION_LOST = 'COMMUNICATION_LOST'
-    CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE'
-    CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE'
-    DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE'
-    DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY'
-    DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE'
-    DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE'
-    DRIVER_UNRESPONSIVE = 'DRIVER_UNRESPONSIVE'
-    EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY'
-    GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED'
-    GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED'
-    GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE'
-    HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE'
-    IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED'
-    INACTIVITY = 'INACTIVITY'
-    INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE'
-    INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE'
-    INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    INVALID_ARGUMENT = 'INVALID_ARGUMENT'
-    INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE'
-    IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE'
-    JOB_FINISHED = 'JOB_FINISHED'
-    K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE'
-    K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT'
-    METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY'
-    NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT'
-    NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE'
-    NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE'
-    NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE'
-    NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE'
-    REQUEST_REJECTED = 'REQUEST_REJECTED'
-    REQUEST_THROTTLED = 'REQUEST_THROTTLED'
-    SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR'
-    SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION'
-    SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE'
-    SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES'
-    SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD'
-    SPARK_ERROR = 'SPARK_ERROR'
-    SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE'
-    SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE'
-    SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION'
-    STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE'
-    STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE'
-    SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE'
-    TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE'
-    TRIAL_EXPIRED = 'TRIAL_EXPIRED'
-    UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE'
-    UNKNOWN = 'UNKNOWN'
-    UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE'
-    UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE'
-    USER_REQUEST = 'USER_REQUEST'
-    WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE'
-    WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR'
-    WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR'
+    ABUSE_DETECTED = "ABUSE_DETECTED"
+    ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE"
+    AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE"
+    AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE"
+    AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE"
+    AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE"
+    AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED"
+    AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE"
+    AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE"
+    AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE"
+    AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE"
+    AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION"
+    AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION"
+    AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING"
+    AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING"
+    AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE"
+    AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE"
+    AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE"
+    BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT"
+    BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION"
+    CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE"
+    CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE"
+    CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT"
+    CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN"
+    COMMUNICATION_LOST = "COMMUNICATION_LOST"
+    CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE"
+    CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE"
+    DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE"
+    DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY"
+    DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE"
+    DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE"
+    DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE"
+    EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY"
+    GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED"
+    GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED"
+    GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE"
+    HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE"
+    IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED"
+    INACTIVITY = "INACTIVITY"
+    INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE"
+    INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE"
+    INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    INVALID_ARGUMENT = "INVALID_ARGUMENT"
+    INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE"
+    IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE"
+    JOB_FINISHED = "JOB_FINISHED"
+    K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE"
+    K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT"
+    METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY"
+    NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT"
+    NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE"
+    NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE"
+    NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE"
+    NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE"
+    REQUEST_REJECTED = "REQUEST_REJECTED"
+    REQUEST_THROTTLED = "REQUEST_THROTTLED"
+    SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR"
+    SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION"
+    SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE"
+    SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES"
+    SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD"
+    SPARK_ERROR = "SPARK_ERROR"
+    SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE"
+    SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE"
+    SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION"
+    STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE"
+    STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE"
+    SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE"
+    TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE"
+    TRIAL_EXPIRED = "TRIAL_EXPIRED"
+    UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE"
+    UNKNOWN = "UNKNOWN"
+    UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE"
+    UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE"
+    USER_REQUEST = "USER_REQUEST"
+    WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE"
+    WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR"
+    WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR"
 
 
 class TerminationReasonType(Enum):
     """type of the termination"""
 
-    CLIENT_ERROR = 'CLIENT_ERROR'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    SERVICE_FAULT = 'SERVICE_FAULT'
-    SUCCESS = 'SUCCESS'
+    CLIENT_ERROR = "CLIENT_ERROR"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    SERVICE_FAULT = "SERVICE_FAULT"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -7644,21 +8971,28 @@ class UninstallLibraries:
     def as_dict(self) -> dict:
         """Serializes the UninstallLibraries into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.libraries: body['libraries'] = self.libraries
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.libraries:
+            body["libraries"] = self.libraries
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UninstallLibraries:
         """Deserializes the UninstallLibraries from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            libraries=_repeated_dict(d, "libraries", Library),
+        )
 
 
 @dataclass
@@ -7688,19 +9022,21 @@ class UnpinCluster:
     def as_dict(self) -> dict:
         """Serializes the UnpinCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpinCluster:
         """Deserializes the UnpinCluster from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None))
+        return cls(cluster_id=d.get("cluster_id", None))
 
 
 @dataclass
@@ -7739,25 +9075,33 @@ class UpdateCluster:
     def as_dict(self) -> dict:
         """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster: body['cluster'] = self.cluster.as_dict()
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.cluster:
+            body["cluster"] = self.cluster.as_dict()
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster: body['cluster'] = self.cluster
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.cluster:
+            body["cluster"] = self.cluster
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCluster:
         """Deserializes the UpdateCluster from a dictionary."""
-        return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource),
-                   cluster_id=d.get('cluster_id', None),
-                   update_mask=d.get('update_mask', None))
+        return cls(
+            cluster=_from_dict(d, "cluster", UpdateClusterResource),
+            cluster_id=d.get("cluster_id", None),
+            update_mask=d.get("update_mask", None),
+        )
 
 
 @dataclass
@@ -7939,109 +9283,163 @@ class UpdateClusterResource:
     def as_dict(self) -> dict:
         """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
         if self.autotermination_minutes is not None:
-            body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
         """Deserializes the UpdateClusterResource from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   is_single_node=d.get('is_single_node', None),
-                   kind=_enum(d, 'kind', Kind),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
-                   single_user_name=d.get('single_user_name', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   spark_version=d.get('spark_version', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None),
-                   use_ml_runtime=d.get('use_ml_runtime', None),
-                   workload_type=_from_dict(d, 'workload_type', WorkloadType))
+        return cls(
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_name=d.get("cluster_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            kind=_enum(d, "kind", Kind),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+            workload_type=_from_dict(d, "workload_type", WorkloadType),
+        )
 
 
 @dataclass
@@ -8090,19 +9488,21 @@ class VolumesStorageInfo:
     def as_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumesStorageInfo:
         """Deserializes the VolumesStorageInfo from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 @dataclass
@@ -8113,19 +9513,21 @@ class WorkloadType:
     def as_dict(self) -> dict:
         """Serializes the WorkloadType into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clients: body['clients'] = self.clients.as_dict()
+        if self.clients:
+            body["clients"] = self.clients.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkloadType into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clients: body['clients'] = self.clients
+        if self.clients:
+            body["clients"] = self.clients
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkloadType:
         """Deserializes the WorkloadType from a dictionary."""
-        return cls(clients=_from_dict(d, 'clients', ClientsTypes))
+        return cls(clients=_from_dict(d, "clients", ClientsTypes))
 
 
 @dataclass
@@ -8136,59 +9538,63 @@ class WorkspaceStorageInfo:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
+        if self.destination is not None:
+            body["destination"] = self.destination
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceStorageInfo:
         """Deserializes the WorkspaceStorageInfo from a dictionary."""
-        return cls(destination=d.get('destination', None))
+        return cls(destination=d.get("destination", None))
 
 
 class ClusterPoliciesAPI:
     """You can use cluster policies to control users' ability to configure clusters based on a set of rules.
     These rules specify which attributes or attribute values can be used during cluster creation. Cluster
     policies have ACLs that limit their use to specific users and groups.
-    
+
     With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
     the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
     settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some
     fields. - Manage costs by setting limits on attributes that impact the hourly rate.
-    
+
     Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
     creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
     policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
     permission and access to cluster policies can select the Unrestricted policy and policies they have access
     to. - A user that has access to only cluster policies, can select the policies they have access to.
-    
+
     If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
     edit, and delete policies. Admin users also have access to all policies."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               definition: Optional[str] = None,
-               description: Optional[str] = None,
-               libraries: Optional[List[Library]] = None,
-               max_clusters_per_user: Optional[int] = None,
-               name: Optional[str] = None,
-               policy_family_definition_overrides: Optional[str] = None,
-               policy_family_id: Optional[str] = None) -> CreatePolicyResponse:
+    def create(
+        self,
+        *,
+        definition: Optional[str] = None,
+        description: Optional[str] = None,
+        libraries: Optional[List[Library]] = None,
+        max_clusters_per_user: Optional[int] = None,
+        name: Optional[str] = None,
+        policy_family_definition_overrides: Optional[str] = None,
+        policy_family_id: Optional[str] = None,
+    ) -> CreatePolicyResponse:
         """Create a new policy.
-        
+
         Creates a new policy with prescribed settings.
-        
+
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-          
+
           [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
         :param description: str (optional)
           Additional human-readable description of the cluster policy.
@@ -8204,70 +9610,95 @@ def create(self,
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
-          
+
           You can use this to customize the policy definition inherited from the policy family. Policy rules
           specified here are merged into the inherited policy definition.
-          
+
           [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
         :param policy_family_id: str (optional)
           ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
           definition.
-          
+
           Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
           policy definition.
-        
+
         :returns: :class:`CreatePolicyResponse`
         """
         body = {}
-        if definition is not None: body['definition'] = definition
-        if description is not None: body['description'] = description
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user
-        if name is not None: body['name'] = name
+        if definition is not None:
+            body["definition"] = definition
+        if description is not None:
+            body["description"] = description
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        if max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = max_clusters_per_user
+        if name is not None:
+            body["name"] = name
         if policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = policy_family_definition_overrides
-        if policy_family_id is not None: body['policy_family_id'] = policy_family_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/policies/clusters/create', body=body, headers=headers)
+            body["policy_family_definition_overrides"] = policy_family_definition_overrides
+        if policy_family_id is not None:
+            body["policy_family_id"] = policy_family_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/policies/clusters/create",
+            body=body,
+            headers=headers,
+        )
         return CreatePolicyResponse.from_dict(res)
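For orientation, a minimal usage sketch of this create endpoint; the policy name and the definition JSON rule are illustrative placeholders, and WorkspaceClient is assumed to pick up credentials from the environment or .databrickscfg:

    import json

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # credentials resolved via the SDK's unified auth
    created = w.cluster_policies.create(
        name="autotermination-policy",  # placeholder name
        definition=json.dumps(
            {"autotermination_minutes": {"type": "fixed", "value": 30}}  # illustrative policy rule
        ),
    )
    print(created.policy_id)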
 
     def delete(self, policy_id: str):
         """Delete a cluster policy.
-        
+
         Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
-        
+
         :param policy_id: str
           The ID of the policy to delete.
-        
-        
+
+
         """
         body = {}
-        if policy_id is not None: body['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/policies/clusters/delete', body=body, headers=headers)
-
-    def edit(self,
-             policy_id: str,
-             *,
-             definition: Optional[str] = None,
-             description: Optional[str] = None,
-             libraries: Optional[List[Library]] = None,
-             max_clusters_per_user: Optional[int] = None,
-             name: Optional[str] = None,
-             policy_family_definition_overrides: Optional[str] = None,
-             policy_family_id: Optional[str] = None):
+        if policy_id is not None:
+            body["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/policies/clusters/delete",
+            body=body,
+            headers=headers,
+        )
+
+    def edit(
+        self,
+        policy_id: str,
+        *,
+        definition: Optional[str] = None,
+        description: Optional[str] = None,
+        libraries: Optional[List[Library]] = None,
+        max_clusters_per_user: Optional[int] = None,
+        name: Optional[str] = None,
+        policy_family_definition_overrides: Optional[str] = None,
+        policy_family_id: Optional[str] = None,
+    ):
         """Update a cluster policy.
-        
+
         Update an existing policy for a cluster. This operation may make some clusters governed by the previous
         policy invalid.
-        
+
         :param policy_id: str
           The ID of the policy to update.
         :param definition: str (optional)
           Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-          
+
           [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
         :param description: str (optional)
           Additional human-readable description of the cluster policy.
@@ -8283,113 +9714,155 @@ def edit(self,
         :param policy_family_definition_overrides: str (optional)
           Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
           document must be passed as a string and cannot be embedded in the requests.
-          
+
           You can use this to customize the policy definition inherited from the policy family. Policy rules
           specified here are merged into the inherited policy definition.
-          
+
           [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
         :param policy_family_id: str (optional)
           ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
           definition.
-          
+
           Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
           policy definition.
-        
-        
+
+
         """
         body = {}
-        if definition is not None: body['definition'] = definition
-        if description is not None: body['description'] = description
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user
-        if name is not None: body['name'] = name
+        if definition is not None:
+            body["definition"] = definition
+        if description is not None:
+            body["description"] = description
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        if max_clusters_per_user is not None:
+            body["max_clusters_per_user"] = max_clusters_per_user
+        if name is not None:
+            body["name"] = name
         if policy_family_definition_overrides is not None:
-            body['policy_family_definition_overrides'] = policy_family_definition_overrides
-        if policy_family_id is not None: body['policy_family_id'] = policy_family_id
-        if policy_id is not None: body['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/policies/clusters/edit', body=body, headers=headers)
+            body["policy_family_definition_overrides"] = policy_family_definition_overrides
+        if policy_family_id is not None:
+            body["policy_family_id"] = policy_family_id
+        if policy_id is not None:
+            body["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/policies/clusters/edit",
+            body=body,
+            headers=headers,
+        )
 
     def get(self, policy_id: str) -> Policy:
         """Get a cluster policy.
-        
+
         Get a cluster policy entity. Creation and editing is available to admins only.
-        
+
         :param policy_id: str
           Canonical unique identifier for the Cluster Policy.
-        
+
         :returns: :class:`Policy`
         """
 
         query = {}
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/policies/clusters/get', query=query, headers=headers)
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/policies/clusters/get",
+            query=query,
+            headers=headers,
+        )
         return Policy.from_dict(res)
 
     def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse:
         """Get cluster policy permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
-        
+
         :returns: :class:`GetClusterPolicyPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels",
+            headers=headers,
+        )
         return GetClusterPolicyPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions:
         """Get cluster policy permissions.
-        
+
         Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
         object.
-        
+
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
-        
+
         :returns: :class:`ClusterPolicyPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}",
+            headers=headers,
+        )
         return ClusterPolicyPermissions.from_dict(res)
 
-    def list(self,
-             *,
-             sort_column: Optional[ListSortColumn] = None,
-             sort_order: Optional[ListSortOrder] = None) -> Iterator[Policy]:
+    def list(
+        self,
+        *,
+        sort_column: Optional[ListSortColumn] = None,
+        sort_order: Optional[ListSortOrder] = None,
+    ) -> Iterator[Policy]:
         """List cluster policies.
-        
+
         Returns a list of policies accessible by the requesting user.
-        
+
         :param sort_column: :class:`ListSortColumn` (optional)
           The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
           creation time. * `POLICY_NAME` - Sort result list by policy name.
         :param sort_order: :class:`ListSortOrder` (optional)
           The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC`
           - Sort result list in ascending order.
-        
+
         :returns: Iterator over :class:`Policy`
         """
 
         query = {}
-        if sort_column is not None: query['sort_column'] = sort_column.value
-        if sort_order is not None: query['sort_order'] = sort_order.value
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET', '/api/2.0/policies/clusters/list', query=query, headers=headers)
+        if sort_column is not None:
+            query["sort_column"] = sort_column.value
+        if sort_order is not None:
+            query["sort_order"] = sort_order.value
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do(
+            "GET",
+            "/api/2.0/policies/clusters/list",
+            query=query,
+            headers=headers,
+        )
         parsed = ListPoliciesResponse.from_dict(json).policies
         return parsed if parsed is not None else []
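A short sketch of consuming the iterator returned by list, using the sort enums defined in this module; assumes a configured WorkspaceClient:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    for policy in w.cluster_policies.list(
        sort_column=compute.ListSortColumn.POLICY_NAME,
        sort_order=compute.ListSortOrder.ASC,
    ):
        print(policy.policy_id, policy.name)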
 
@@ -8397,77 +9870,87 @@ def set_permissions(
         self,
         cluster_policy_id: str,
         *,
-        access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
+        access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None,
     ) -> ClusterPolicyPermissions:
         """Set cluster policy permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
         :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ClusterPolicyPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}",
+            body=body,
+            headers=headers,
+        )
         return ClusterPolicyPermissions.from_dict(res)
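A hedged sketch of set_permissions that grants a group CAN_USE on a policy, using the access-control request and permission-level classes defined earlier in this module; the policy ID and group name are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    perms = w.cluster_policies.set_permissions(
        cluster_policy_id="ABC123DEF456",  # placeholder policy ID
        access_control_list=[
            compute.ClusterPolicyAccessControlRequest(
                group_name="data-engineers",  # placeholder group
                permission_level=compute.ClusterPolicyPermissionLevel.CAN_USE,
            )
        ],
    )
    print(len(perms.access_control_list or []))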
 
     def update_permissions(
         self,
         cluster_policy_id: str,
         *,
-        access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
+        access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None,
     ) -> ClusterPolicyPermissions:
         """Update cluster policy permissions.
-        
+
         Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
         object.
-        
+
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
         :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ClusterPolicyPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}",
+            body=body,
+            headers=headers,
+        )
         return ClusterPolicyPermissions.from_dict(res)
 
 
 class ClustersAPI:
     """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
-    
+
     Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing
     page for a list of the supported instance types and their corresponding DBUs.
-    
+
     A Databricks cluster is a set of computation resources and configurations on which you run data
     engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming
     analytics, ad-hoc analytics, and machine learning.
-    
+
     You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a
     distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data
     collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs.
-    
+
     You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and
     restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
     analysis.
-    
+
     IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
     keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
     administrator can pin a cluster to the cluster list."""
@@ -8476,14 +9959,18 @@ def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_cluster_running(
-            self,
-            cluster_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails:
+        self,
+        cluster_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[ClusterDetails], None]] = None,
+    ) -> ClusterDetails:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (State.RUNNING, )
-        failure_states = (State.ERROR, State.TERMINATED, )
-        status_message = 'polling...'
+        target_states = (State.RUNNING,)
+        failure_states = (
+            State.ERROR,
+            State.TERMINATED,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(cluster_id=cluster_id)
@@ -8494,27 +9981,28 @@ def wait_get_cluster_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f"failed to reach RUNNING, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"cluster_id={cluster_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_get_cluster_terminated(
-            self,
-            cluster_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails:
+        self,
+        cluster_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[ClusterDetails], None]] = None,
+    ) -> ClusterDetails:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (State.TERMINATED, )
-        failure_states = (State.ERROR, )
-        status_message = 'polling...'
+        target_states = (State.TERMINATED,)
+        failure_states = (State.ERROR,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(cluster_id=cluster_id)
@@ -8525,86 +10013,98 @@ def wait_get_cluster_terminated(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach TERMINATED, got {status}: {status_message}'
+                msg = f"failed to reach TERMINATED, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"cluster_id={cluster_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
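The two waiters above poll get() with a capped, jittered backoff until the cluster reaches RUNNING or TERMINATED (or a failure state). A small sketch of calling one directly with a progress callback; the cluster ID is a placeholder:

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    details = w.clusters.wait_get_cluster_running(
        cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
        timeout=timedelta(minutes=30),
        callback=lambda c: print(f"{c.state}: {c.state_message}"),  # progress reporting
    )
    print(details.state)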
 
     def change_owner(self, cluster_id: str, owner_username: str):
         """Change cluster owner.
-        
+
         Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform
         this operation. The service principal application ID can be supplied as an argument to
         `owner_username`.
-        
+
         :param cluster_id: str
           
         :param owner_username: str
           New owner of the cluster_id after this RPC.
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if owner_username is not None: body['owner_username'] = owner_username
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.1/clusters/change-owner', body=body, headers=headers)
-
-    def create(self,
-               spark_version: str,
-               *,
-               apply_policy_default_values: Optional[bool] = None,
-               autoscale: Optional[AutoScale] = None,
-               autotermination_minutes: Optional[int] = None,
-               aws_attributes: Optional[AwsAttributes] = None,
-               azure_attributes: Optional[AzureAttributes] = None,
-               clone_from: Optional[CloneCluster] = None,
-               cluster_log_conf: Optional[ClusterLogConf] = None,
-               cluster_name: Optional[str] = None,
-               custom_tags: Optional[Dict[str, str]] = None,
-               data_security_mode: Optional[DataSecurityMode] = None,
-               docker_image: Optional[DockerImage] = None,
-               driver_instance_pool_id: Optional[str] = None,
-               driver_node_type_id: Optional[str] = None,
-               enable_elastic_disk: Optional[bool] = None,
-               enable_local_disk_encryption: Optional[bool] = None,
-               gcp_attributes: Optional[GcpAttributes] = None,
-               init_scripts: Optional[List[InitScriptInfo]] = None,
-               instance_pool_id: Optional[str] = None,
-               is_single_node: Optional[bool] = None,
-               kind: Optional[Kind] = None,
-               node_type_id: Optional[str] = None,
-               num_workers: Optional[int] = None,
-               policy_id: Optional[str] = None,
-               runtime_engine: Optional[RuntimeEngine] = None,
-               single_user_name: Optional[str] = None,
-               spark_conf: Optional[Dict[str, str]] = None,
-               spark_env_vars: Optional[Dict[str, str]] = None,
-               ssh_public_keys: Optional[List[str]] = None,
-               use_ml_runtime: Optional[bool] = None,
-               workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if owner_username is not None:
+            body["owner_username"] = owner_username
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.1/clusters/change-owner",
+            body=body,
+            headers=headers,
+        )
+
+    def create(
+        self,
+        spark_version: str,
+        *,
+        apply_policy_default_values: Optional[bool] = None,
+        autoscale: Optional[AutoScale] = None,
+        autotermination_minutes: Optional[int] = None,
+        aws_attributes: Optional[AwsAttributes] = None,
+        azure_attributes: Optional[AzureAttributes] = None,
+        clone_from: Optional[CloneCluster] = None,
+        cluster_log_conf: Optional[ClusterLogConf] = None,
+        cluster_name: Optional[str] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        data_security_mode: Optional[DataSecurityMode] = None,
+        docker_image: Optional[DockerImage] = None,
+        driver_instance_pool_id: Optional[str] = None,
+        driver_node_type_id: Optional[str] = None,
+        enable_elastic_disk: Optional[bool] = None,
+        enable_local_disk_encryption: Optional[bool] = None,
+        gcp_attributes: Optional[GcpAttributes] = None,
+        init_scripts: Optional[List[InitScriptInfo]] = None,
+        instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
+        node_type_id: Optional[str] = None,
+        num_workers: Optional[int] = None,
+        policy_id: Optional[str] = None,
+        runtime_engine: Optional[RuntimeEngine] = None,
+        single_user_name: Optional[str] = None,
+        spark_conf: Optional[Dict[str, str]] = None,
+        spark_env_vars: Optional[Dict[str, str]] = None,
+        ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
+        workload_type: Optional[WorkloadType] = None,
+    ) -> Wait[ClusterDetails]:
         """Create new cluster.
-        
+
         Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
         necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud
         provider limitations (account limits, spot price, etc.) or transient network issues.
-        
+
         If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
         Otherwise the cluster will terminate with an informative error message.
-        
+
         Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
         the [create compute UI] and then copying the generated JSON definition from the UI.
-        
+
         [create compute UI]: https://docs.databricks.com/compute/configure.html
-        
+
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -8638,18 +10138,18 @@ def create(self,
         :param custom_tags: Dict[str,str] (optional)
           Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
           instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
+
           - Currently, Databricks allows at most 45 custom tags
-          
+
           - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
-          
+
           The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
           choose the most appropriate access mode depending on your compute configuration. *
           `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
           for `SINGLE_USER`.
-          
+
           The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
           users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
           A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
@@ -8658,10 +10158,10 @@ def create(self,
           fully isolated so that they cannot see each other's data and credentials. Most data governance
           features are supported in this mode. But programming languages and cluster features might be
           limited.
-          
+
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
-          
+
           * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
           `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
           clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
@@ -8691,14 +10191,14 @@ def create(self,
           The optional ID of the instance pool to which the cluster belongs.
         :param is_single_node: bool (optional)
           This field can only be used with `kind`.
-          
+
           When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
           and `num_workers`
         :param kind: :class:`Kind` (optional)
           The kind of compute described by this compute specification.
-          
+
           Depending on `kind`, different validations and default values will be applied.
-          
+
           The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
@@ -8708,7 +10208,7 @@ def create(self,
         :param num_workers: int (optional)
           Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
           `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
+
           Note: When reading the properties of a cluster, this field reflects the desired number of workers
           rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
           workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
@@ -8718,10 +10218,10 @@ def create(self,
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
           Determines the cluster's runtime engine, either standard or Photon.
-          
+
           This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
           `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-          
+
           If left unspecified, the runtime engine defaults to standard unless the spark_version contains
           -photon-, in which case Photon will be used.
         :param single_user_name: str (optional)
@@ -8734,11 +10234,11 @@ def create(self,
           An object containing a set of optional, user-specified environment variable key-value pairs. Please
           note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
           launching the driver and workers.
-          
+
           In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
           `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks
           managed environmental variables are included as well.
-          
+
           Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
           "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
           -Dspark.shuffle.service.enabled=true"}`
@@ -8748,55 +10248,89 @@ def create(self,
           specified.
         :param use_ml_runtime: bool (optional)
           This field can only be used with `kind`.
-          
+
           `effective_spark_version` is determined by `spark_version` (DBR release), this field
           `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
         :param workload_type: :class:`WorkloadType` (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
         if apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = apply_policy_default_values
-        if autoscale is not None: body['autoscale'] = autoscale.as_dict()
-        if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes
-        if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict()
-        if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict()
-        if clone_from is not None: body['clone_from'] = clone_from.as_dict()
-        if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict()
-        if cluster_name is not None: body['cluster_name'] = cluster_name
-        if custom_tags is not None: body['custom_tags'] = custom_tags
-        if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value
-        if docker_image is not None: body['docker_image'] = docker_image.as_dict()
-        if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id
-        if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id
-        if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk
+            body["apply_policy_default_values"] = apply_policy_default_values
+        if autoscale is not None:
+            body["autoscale"] = autoscale.as_dict()
+        if autotermination_minutes is not None:
+            body["autotermination_minutes"] = autotermination_minutes
+        if aws_attributes is not None:
+            body["aws_attributes"] = aws_attributes.as_dict()
+        if azure_attributes is not None:
+            body["azure_attributes"] = azure_attributes.as_dict()
+        if clone_from is not None:
+            body["clone_from"] = clone_from.as_dict()
+        if cluster_log_conf is not None:
+            body["cluster_log_conf"] = cluster_log_conf.as_dict()
+        if cluster_name is not None:
+            body["cluster_name"] = cluster_name
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
+        if data_security_mode is not None:
+            body["data_security_mode"] = data_security_mode.value
+        if docker_image is not None:
+            body["docker_image"] = docker_image.as_dict()
+        if driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = driver_instance_pool_id
+        if driver_node_type_id is not None:
+            body["driver_node_type_id"] = driver_node_type_id
+        if enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = enable_elastic_disk
         if enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = enable_local_disk_encryption
-        if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
-        if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
-        if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
-        if is_single_node is not None: body['is_single_node'] = is_single_node
-        if kind is not None: body['kind'] = kind.value
-        if node_type_id is not None: body['node_type_id'] = node_type_id
-        if num_workers is not None: body['num_workers'] = num_workers
-        if policy_id is not None: body['policy_id'] = policy_id
-        if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value
-        if single_user_name is not None: body['single_user_name'] = single_user_name
-        if spark_conf is not None: body['spark_conf'] = spark_conf
-        if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
-        if spark_version is not None: body['spark_version'] = spark_version
-        if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
-        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
-        if workload_type is not None: body['workload_type'] = workload_type.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/clusters/create', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=CreateClusterResponse.from_dict(op_response),
-                    cluster_id=op_response['cluster_id'])
+            body["enable_local_disk_encryption"] = enable_local_disk_encryption
+        if gcp_attributes is not None:
+            body["gcp_attributes"] = gcp_attributes.as_dict()
+        if init_scripts is not None:
+            body["init_scripts"] = [v.as_dict() for v in init_scripts]
+        if instance_pool_id is not None:
+            body["instance_pool_id"] = instance_pool_id
+        if is_single_node is not None:
+            body["is_single_node"] = is_single_node
+        if kind is not None:
+            body["kind"] = kind.value
+        if node_type_id is not None:
+            body["node_type_id"] = node_type_id
+        if num_workers is not None:
+            body["num_workers"] = num_workers
+        if policy_id is not None:
+            body["policy_id"] = policy_id
+        if runtime_engine is not None:
+            body["runtime_engine"] = runtime_engine.value
+        if single_user_name is not None:
+            body["single_user_name"] = single_user_name
+        if spark_conf is not None:
+            body["spark_conf"] = spark_conf
+        if spark_env_vars is not None:
+            body["spark_env_vars"] = spark_env_vars
+        if spark_version is not None:
+            body["spark_version"] = spark_version
+        if ssh_public_keys is not None:
+            body["ssh_public_keys"] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None:
+            body["use_ml_runtime"] = use_ml_runtime
+        if workload_type is not None:
+            body["workload_type"] = workload_type.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/clusters/create", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=CreateClusterResponse.from_dict(op_response),
+            cluster_id=op_response["cluster_id"],
+        )
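A usage sketch of this long-running create call, blocking on the returned Wait via .result(); the cluster name, Spark version, and node type are placeholders (real values come from spark_versions() and list_node_types()):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    cluster = w.clusters.create(
        cluster_name="sdk-example",        # placeholder name
        spark_version="15.4.x-scala2.12",  # placeholder DBR version
        node_type_id="i3.xlarge",          # placeholder node type
        num_workers=1,
        autotermination_minutes=15,
    ).result()  # blocks until wait_get_cluster_running succeeds or times out
    print(cluster.cluster_id, cluster.state)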
 
     def create_and_wait(
         self,
@@ -8832,112 +10366,123 @@ def create_and_wait(
         ssh_public_keys: Optional[List[str]] = None,
         use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
-        timeout=timedelta(minutes=20)) -> ClusterDetails:
-        return self.create(apply_policy_default_values=apply_policy_default_values,
-                           autoscale=autoscale,
-                           autotermination_minutes=autotermination_minutes,
-                           aws_attributes=aws_attributes,
-                           azure_attributes=azure_attributes,
-                           clone_from=clone_from,
-                           cluster_log_conf=cluster_log_conf,
-                           cluster_name=cluster_name,
-                           custom_tags=custom_tags,
-                           data_security_mode=data_security_mode,
-                           docker_image=docker_image,
-                           driver_instance_pool_id=driver_instance_pool_id,
-                           driver_node_type_id=driver_node_type_id,
-                           enable_elastic_disk=enable_elastic_disk,
-                           enable_local_disk_encryption=enable_local_disk_encryption,
-                           gcp_attributes=gcp_attributes,
-                           init_scripts=init_scripts,
-                           instance_pool_id=instance_pool_id,
-                           is_single_node=is_single_node,
-                           kind=kind,
-                           node_type_id=node_type_id,
-                           num_workers=num_workers,
-                           policy_id=policy_id,
-                           runtime_engine=runtime_engine,
-                           single_user_name=single_user_name,
-                           spark_conf=spark_conf,
-                           spark_env_vars=spark_env_vars,
-                           spark_version=spark_version,
-                           ssh_public_keys=ssh_public_keys,
-                           use_ml_runtime=use_ml_runtime,
-                           workload_type=workload_type).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> ClusterDetails:
+        return self.create(
+            apply_policy_default_values=apply_policy_default_values,
+            autoscale=autoscale,
+            autotermination_minutes=autotermination_minutes,
+            aws_attributes=aws_attributes,
+            azure_attributes=azure_attributes,
+            clone_from=clone_from,
+            cluster_log_conf=cluster_log_conf,
+            cluster_name=cluster_name,
+            custom_tags=custom_tags,
+            data_security_mode=data_security_mode,
+            docker_image=docker_image,
+            driver_instance_pool_id=driver_instance_pool_id,
+            driver_node_type_id=driver_node_type_id,
+            enable_elastic_disk=enable_elastic_disk,
+            enable_local_disk_encryption=enable_local_disk_encryption,
+            gcp_attributes=gcp_attributes,
+            init_scripts=init_scripts,
+            instance_pool_id=instance_pool_id,
+            is_single_node=is_single_node,
+            kind=kind,
+            node_type_id=node_type_id,
+            num_workers=num_workers,
+            policy_id=policy_id,
+            runtime_engine=runtime_engine,
+            single_user_name=single_user_name,
+            spark_conf=spark_conf,
+            spark_env_vars=spark_env_vars,
+            spark_version=spark_version,
+            ssh_public_keys=ssh_public_keys,
+            use_ml_runtime=use_ml_runtime,
+            workload_type=workload_type,
+        ).result(timeout=timeout)
 
     def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
         """Terminate cluster.
-        
+
         Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the
         termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a
         `TERMINATING` or `TERMINATED` state, nothing will happen.
-        
+
         :param cluster_id: str
           The cluster to be terminated.
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_terminated for more details.
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        op_response = self._api.do('POST', '/api/2.1/clusters/delete', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_terminated,
-                    response=DeleteClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
+        op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_terminated,
+            response=DeleteClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
 
     def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.delete(cluster_id=cluster_id).result(timeout=timeout)
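A sketch of terminating a cluster and waiting for the TERMINATED state, either by blocking on the Wait object or via the delete_and_wait convenience wrapper; the cluster ID is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    w.clusters.delete(cluster_id="0123-456789-abcdefgh").result()
    # or, equivalently:
    # w.clusters.delete_and_wait(cluster_id="0123-456789-abcdefgh")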
 
-    def edit(self,
-             cluster_id: str,
-             spark_version: str,
-             *,
-             apply_policy_default_values: Optional[bool] = None,
-             autoscale: Optional[AutoScale] = None,
-             autotermination_minutes: Optional[int] = None,
-             aws_attributes: Optional[AwsAttributes] = None,
-             azure_attributes: Optional[AzureAttributes] = None,
-             cluster_log_conf: Optional[ClusterLogConf] = None,
-             cluster_name: Optional[str] = None,
-             custom_tags: Optional[Dict[str, str]] = None,
-             data_security_mode: Optional[DataSecurityMode] = None,
-             docker_image: Optional[DockerImage] = None,
-             driver_instance_pool_id: Optional[str] = None,
-             driver_node_type_id: Optional[str] = None,
-             enable_elastic_disk: Optional[bool] = None,
-             enable_local_disk_encryption: Optional[bool] = None,
-             gcp_attributes: Optional[GcpAttributes] = None,
-             init_scripts: Optional[List[InitScriptInfo]] = None,
-             instance_pool_id: Optional[str] = None,
-             is_single_node: Optional[bool] = None,
-             kind: Optional[Kind] = None,
-             node_type_id: Optional[str] = None,
-             num_workers: Optional[int] = None,
-             policy_id: Optional[str] = None,
-             runtime_engine: Optional[RuntimeEngine] = None,
-             single_user_name: Optional[str] = None,
-             spark_conf: Optional[Dict[str, str]] = None,
-             spark_env_vars: Optional[Dict[str, str]] = None,
-             ssh_public_keys: Optional[List[str]] = None,
-             use_ml_runtime: Optional[bool] = None,
-             workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
+    def edit(
+        self,
+        cluster_id: str,
+        spark_version: str,
+        *,
+        apply_policy_default_values: Optional[bool] = None,
+        autoscale: Optional[AutoScale] = None,
+        autotermination_minutes: Optional[int] = None,
+        aws_attributes: Optional[AwsAttributes] = None,
+        azure_attributes: Optional[AzureAttributes] = None,
+        cluster_log_conf: Optional[ClusterLogConf] = None,
+        cluster_name: Optional[str] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        data_security_mode: Optional[DataSecurityMode] = None,
+        docker_image: Optional[DockerImage] = None,
+        driver_instance_pool_id: Optional[str] = None,
+        driver_node_type_id: Optional[str] = None,
+        enable_elastic_disk: Optional[bool] = None,
+        enable_local_disk_encryption: Optional[bool] = None,
+        gcp_attributes: Optional[GcpAttributes] = None,
+        init_scripts: Optional[List[InitScriptInfo]] = None,
+        instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
+        node_type_id: Optional[str] = None,
+        num_workers: Optional[int] = None,
+        policy_id: Optional[str] = None,
+        runtime_engine: Optional[RuntimeEngine] = None,
+        single_user_name: Optional[str] = None,
+        spark_conf: Optional[Dict[str, str]] = None,
+        spark_env_vars: Optional[Dict[str, str]] = None,
+        ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
+        workload_type: Optional[WorkloadType] = None,
+    ) -> Wait[ClusterDetails]:
         """Update cluster configuration.
-        
+
         Updates the configuration of a cluster to match the provided attributes and size. A cluster can be
         updated if it is in a `RUNNING` or `TERMINATED` state.
-        
+
         If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
         can take effect.
-        
+
         If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it
         is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update
         a cluster in any other state will be rejected with an `INVALID_STATE` error code.
-        
+
         Clusters created by the Databricks Jobs service cannot be edited.
-        
+
         :param cluster_id: str
           ID of the cluster
         :param spark_version: str
@@ -8971,18 +10516,18 @@ def edit(self,
         :param custom_tags: Dict[str,str] (optional)
           Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
           instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
+
           - Currently, Databricks allows at most 45 custom tags
-          
+
           - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
-          
+
           The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
           choose the most appropriate access mode depending on your compute configuration. *
           `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
           for `SINGLE_USER`.
-          
+
           The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
           users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
           A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
@@ -8991,10 +10536,10 @@ def edit(self,
           fully isolated so that they cannot see each other's data and credentials. Most data governance
           features are supported in this mode. But programming languages and cluster features might be
           limited.
-          
+
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
-          
+
           * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
           `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
           clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
@@ -9024,14 +10569,14 @@ def edit(self,
           The optional ID of the instance pool to which the cluster belongs.
         :param is_single_node: bool (optional)
           This field can only be used with `kind`.
-          
+
           When set to true, Databricks will automatically set single-node-related `custom_tags`, `spark_conf`,
           and `num_workers`.
         :param kind: :class:`Kind` (optional)
           The kind of compute described by this compute specification.
-          
+
           Depending on `kind`, different validations and default values will be applied.
-          
+
           The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
@@ -9041,7 +10586,7 @@ def edit(self,
         :param num_workers: int (optional)
           Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
           `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
+
           Note: When reading the properties of a cluster, this field reflects the desired number of workers
           rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
           workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
@@ -9051,10 +10596,10 @@ def edit(self,
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
           Determines the cluster's runtime engine, either standard or Photon.
-          
+
           This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
           `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-          
+
           If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
           `-photon-`, in which case Photon will be used.
         :param single_user_name: str (optional)
@@ -9067,11 +10612,11 @@ def edit(self,
           An object containing a set of optional, user-specified environment variable key-value pairs. Please
           note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
           launching the driver and workers.
-          
+
           In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
           `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
           Databricks-managed environment variables are included as well.
-          
+
           Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
           "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
           -Dspark.shuffle.service.enabled=true"}`
@@ -9081,55 +10626,89 @@ def edit(self,
           specified.
         :param use_ml_runtime: bool (optional)
           This field can only be used with `kind`.
-          
+
           `effective_spark_version` is determined by `spark_version` (DBR release), this field
           `use_ml_runtime`, and whether `node_type_id` is a GPU node.
         :param workload_type: :class:`WorkloadType` (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
         if apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = apply_policy_default_values
-        if autoscale is not None: body['autoscale'] = autoscale.as_dict()
-        if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes
-        if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict()
-        if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict()
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict()
-        if cluster_name is not None: body['cluster_name'] = cluster_name
-        if custom_tags is not None: body['custom_tags'] = custom_tags
-        if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value
-        if docker_image is not None: body['docker_image'] = docker_image.as_dict()
-        if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id
-        if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id
-        if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk
+            body["apply_policy_default_values"] = apply_policy_default_values
+        if autoscale is not None:
+            body["autoscale"] = autoscale.as_dict()
+        if autotermination_minutes is not None:
+            body["autotermination_minutes"] = autotermination_minutes
+        if aws_attributes is not None:
+            body["aws_attributes"] = aws_attributes.as_dict()
+        if azure_attributes is not None:
+            body["azure_attributes"] = azure_attributes.as_dict()
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if cluster_log_conf is not None:
+            body["cluster_log_conf"] = cluster_log_conf.as_dict()
+        if cluster_name is not None:
+            body["cluster_name"] = cluster_name
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
+        if data_security_mode is not None:
+            body["data_security_mode"] = data_security_mode.value
+        if docker_image is not None:
+            body["docker_image"] = docker_image.as_dict()
+        if driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = driver_instance_pool_id
+        if driver_node_type_id is not None:
+            body["driver_node_type_id"] = driver_node_type_id
+        if enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = enable_elastic_disk
         if enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = enable_local_disk_encryption
-        if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
-        if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
-        if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
-        if is_single_node is not None: body['is_single_node'] = is_single_node
-        if kind is not None: body['kind'] = kind.value
-        if node_type_id is not None: body['node_type_id'] = node_type_id
-        if num_workers is not None: body['num_workers'] = num_workers
-        if policy_id is not None: body['policy_id'] = policy_id
-        if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value
-        if single_user_name is not None: body['single_user_name'] = single_user_name
-        if spark_conf is not None: body['spark_conf'] = spark_conf
-        if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
-        if spark_version is not None: body['spark_version'] = spark_version
-        if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
-        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
-        if workload_type is not None: body['workload_type'] = workload_type.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/clusters/edit', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=EditClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
+            body["enable_local_disk_encryption"] = enable_local_disk_encryption
+        if gcp_attributes is not None:
+            body["gcp_attributes"] = gcp_attributes.as_dict()
+        if init_scripts is not None:
+            body["init_scripts"] = [v.as_dict() for v in init_scripts]
+        if instance_pool_id is not None:
+            body["instance_pool_id"] = instance_pool_id
+        if is_single_node is not None:
+            body["is_single_node"] = is_single_node
+        if kind is not None:
+            body["kind"] = kind.value
+        if node_type_id is not None:
+            body["node_type_id"] = node_type_id
+        if num_workers is not None:
+            body["num_workers"] = num_workers
+        if policy_id is not None:
+            body["policy_id"] = policy_id
+        if runtime_engine is not None:
+            body["runtime_engine"] = runtime_engine.value
+        if single_user_name is not None:
+            body["single_user_name"] = single_user_name
+        if spark_conf is not None:
+            body["spark_conf"] = spark_conf
+        if spark_env_vars is not None:
+            body["spark_env_vars"] = spark_env_vars
+        if spark_version is not None:
+            body["spark_version"] = spark_version
+        if ssh_public_keys is not None:
+            body["ssh_public_keys"] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None:
+            body["use_ml_runtime"] = use_ml_runtime
+        if workload_type is not None:
+            body["workload_type"] = workload_type.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=EditClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
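A minimal usage sketch of the `edit` waiter above, assuming a `WorkspaceClient` configured through the default environment-based auth; the cluster ID, Spark version, and node type are placeholders.

from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# edit() returns a Wait[ClusterDetails]; .result() blocks until the cluster is RUNNING again.
details = w.clusters.edit(
    cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
    spark_version="15.4.x-scala2.12",   # placeholder DBR release
    node_type_id="i3.xlarge",           # placeholder node type
    num_workers=2,
).result(timeout=timedelta(minutes=20))
print(details.state)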
 
     def edit_and_wait(
         self,
@@ -9165,54 +10744,59 @@ def edit_and_wait(
         ssh_public_keys: Optional[List[str]] = None,
         use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
-        timeout=timedelta(minutes=20)) -> ClusterDetails:
-        return self.edit(apply_policy_default_values=apply_policy_default_values,
-                         autoscale=autoscale,
-                         autotermination_minutes=autotermination_minutes,
-                         aws_attributes=aws_attributes,
-                         azure_attributes=azure_attributes,
-                         cluster_id=cluster_id,
-                         cluster_log_conf=cluster_log_conf,
-                         cluster_name=cluster_name,
-                         custom_tags=custom_tags,
-                         data_security_mode=data_security_mode,
-                         docker_image=docker_image,
-                         driver_instance_pool_id=driver_instance_pool_id,
-                         driver_node_type_id=driver_node_type_id,
-                         enable_elastic_disk=enable_elastic_disk,
-                         enable_local_disk_encryption=enable_local_disk_encryption,
-                         gcp_attributes=gcp_attributes,
-                         init_scripts=init_scripts,
-                         instance_pool_id=instance_pool_id,
-                         is_single_node=is_single_node,
-                         kind=kind,
-                         node_type_id=node_type_id,
-                         num_workers=num_workers,
-                         policy_id=policy_id,
-                         runtime_engine=runtime_engine,
-                         single_user_name=single_user_name,
-                         spark_conf=spark_conf,
-                         spark_env_vars=spark_env_vars,
-                         spark_version=spark_version,
-                         ssh_public_keys=ssh_public_keys,
-                         use_ml_runtime=use_ml_runtime,
-                         workload_type=workload_type).result(timeout=timeout)
-
-    def events(self,
-               cluster_id: str,
-               *,
-               end_time: Optional[int] = None,
-               event_types: Optional[List[EventType]] = None,
-               limit: Optional[int] = None,
-               offset: Optional[int] = None,
-               order: Optional[GetEventsOrder] = None,
-               start_time: Optional[int] = None) -> Iterator[ClusterEvent]:
+        timeout=timedelta(minutes=20),
+    ) -> ClusterDetails:
+        return self.edit(
+            apply_policy_default_values=apply_policy_default_values,
+            autoscale=autoscale,
+            autotermination_minutes=autotermination_minutes,
+            aws_attributes=aws_attributes,
+            azure_attributes=azure_attributes,
+            cluster_id=cluster_id,
+            cluster_log_conf=cluster_log_conf,
+            cluster_name=cluster_name,
+            custom_tags=custom_tags,
+            data_security_mode=data_security_mode,
+            docker_image=docker_image,
+            driver_instance_pool_id=driver_instance_pool_id,
+            driver_node_type_id=driver_node_type_id,
+            enable_elastic_disk=enable_elastic_disk,
+            enable_local_disk_encryption=enable_local_disk_encryption,
+            gcp_attributes=gcp_attributes,
+            init_scripts=init_scripts,
+            instance_pool_id=instance_pool_id,
+            is_single_node=is_single_node,
+            kind=kind,
+            node_type_id=node_type_id,
+            num_workers=num_workers,
+            policy_id=policy_id,
+            runtime_engine=runtime_engine,
+            single_user_name=single_user_name,
+            spark_conf=spark_conf,
+            spark_env_vars=spark_env_vars,
+            spark_version=spark_version,
+            ssh_public_keys=ssh_public_keys,
+            use_ml_runtime=use_ml_runtime,
+            workload_type=workload_type,
+        ).result(timeout=timeout)
+
+    def events(
+        self,
+        cluster_id: str,
+        *,
+        end_time: Optional[int] = None,
+        event_types: Optional[List[EventType]] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        order: Optional[GetEventsOrder] = None,
+        start_time: Optional[int] = None,
+    ) -> Iterator[ClusterEvent]:
         """List cluster activity events.
-        
+
         Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more
         events to read, the response includes all the parameters necessary to request the next page of
         events.
-        
+
         :param cluster_id: str
           The ID of the cluster to retrieve events about.
         :param end_time: int (optional)
@@ -9229,92 +10813,117 @@ def events(self,
           The order to list events in; either "ASC" or "DESC". Defaults to "DESC".
         :param start_time: int (optional)
           The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.
-        
+
         :returns: Iterator over :class:`ClusterEvent`
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if end_time is not None: body['end_time'] = end_time
-        if event_types is not None: body['event_types'] = [v.value for v in event_types]
-        if limit is not None: body['limit'] = limit
-        if offset is not None: body['offset'] = offset
-        if order is not None: body['order'] = order.value
-        if start_time is not None: body['start_time'] = start_time
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if end_time is not None:
+            body["end_time"] = end_time
+        if event_types is not None:
+            body["event_types"] = [v.value for v in event_types]
+        if limit is not None:
+            body["limit"] = limit
+        if offset is not None:
+            body["offset"] = offset
+        if order is not None:
+            body["order"] = order.value
+        if start_time is not None:
+            body["start_time"] = start_time
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         while True:
-            json = self._api.do('POST', '/api/2.1/clusters/events', body=body, headers=headers)
-            if 'events' in json:
-                for v in json['events']:
+            json = self._api.do("POST", "/api/2.1/clusters/events", body=body, headers=headers)
+            if "events" in json:
+                for v in json["events"]:
                     yield ClusterEvent.from_dict(v)
-            if 'next_page' not in json or not json['next_page']:
+            if "next_page" not in json or not json["next_page"]:
                 return
-            body = json['next_page']
+            body = json["next_page"]
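As a sketch (the cluster ID is a placeholder), the paginator above can be consumed as a plain iterator; each further page is requested lazily through the `next_page` body returned by the API.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()
# Iterate over termination events only; the generator fetches additional pages as needed.
for event in w.clusters.events(
    cluster_id="0123-456789-abcdefgh",            # placeholder
    event_types=[compute.EventType.TERMINATING],  # filter by event type
    limit=25,
):
    print(event.timestamp, event.type, event.details)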
 
     def get(self, cluster_id: str) -> ClusterDetails:
         """Get cluster info.
-        
+
         Retrieves the information for a cluster given its identifier. Clusters can be described while they are
         running, or up to 60 days after they are terminated.
-        
+
         :param cluster_id: str
           The cluster about which to retrieve information.
-        
+
         :returns: :class:`ClusterDetails`
         """
 
         query = {}
-        if cluster_id is not None: query['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', }
+        if cluster_id is not None:
+            query["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/clusters/get', query=query, headers=headers)
+        res = self._api.do("GET", "/api/2.1/clusters/get", query=query, headers=headers)
         return ClusterDetails.from_dict(res)
 
     def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse:
         """Get cluster permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param cluster_id: str
           The cluster for which to get or manage permissions.
-        
+
         :returns: :class:`GetClusterPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/clusters/{cluster_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/clusters/{cluster_id}/permissionLevels",
+            headers=headers,
+        )
         return GetClusterPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, cluster_id: str) -> ClusterPermissions:
         """Get cluster permissions.
-        
+
         Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
-        
+
         :param cluster_id: str
           The cluster for which to get or manage permissions.
-        
+
         :returns: :class:`ClusterPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/clusters/{cluster_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/clusters/{cluster_id}",
+            headers=headers,
+        )
         return ClusterPermissions.from_dict(res)
 
-    def list(self,
-             *,
-             filter_by: Optional[ListClustersFilterBy] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None,
-             sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]:
+    def list(
+        self,
+        *,
+        filter_by: Optional[ListClustersFilterBy] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        sort_by: Optional[ListClustersSortBy] = None,
+    ) -> Iterator[ClusterDetails]:
         """List clusters.
-        
+
         Return information about all pinned and active clusters, and all clusters terminated within the last
         30 days. Clusters terminated prior to this period are not included.
-        
+
         :param filter_by: :class:`ListClustersFilterBy` (optional)
           Filters to apply to the list of clusters.
         :param page_size: int (optional)
@@ -9325,100 +10934,125 @@ def list(self,
           previous page of clusters respectively.
         :param sort_by: :class:`ListClustersSortBy` (optional)
           Sort the list of clusters by a specific criterion.
-        
+
         :returns: Iterator over :class:`ClusterDetails`
         """
 
         query = {}
-        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if sort_by is not None: query['sort_by'] = sort_by.as_dict()
-        headers = {'Accept': 'application/json', }
+        if filter_by is not None:
+            query["filter_by"] = filter_by.as_dict()
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if sort_by is not None:
+            query["sort_by"] = sort_by.as_dict()
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/clusters/list', query=query, headers=headers)
-            if 'clusters' in json:
-                for v in json['clusters']:
+            json = self._api.do("GET", "/api/2.1/clusters/list", query=query, headers=headers)
+            if "clusters" in json:
+                for v in json["clusters"]:
                     yield ClusterDetails.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def list_node_types(self) -> ListNodeTypesResponse:
         """List node types.
-        
+
         Returns a list of supported Spark node types. These node types can be used to launch a cluster.
-        
+
         :returns: :class:`ListNodeTypesResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/clusters/list-node-types', headers=headers)
+        res = self._api.do("GET", "/api/2.1/clusters/list-node-types", headers=headers)
         return ListNodeTypesResponse.from_dict(res)
 
     def list_zones(self) -> ListAvailableZonesResponse:
         """List availability zones.
-        
+
         Returns a list of availability zones in which clusters can be created (for example, us-west-2a). These
         zones can be used to launch a cluster.
-        
+
         :returns: :class:`ListAvailableZonesResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/clusters/list-zones', headers=headers)
+        res = self._api.do("GET", "/api/2.1/clusters/list-zones", headers=headers)
         return ListAvailableZonesResponse.from_dict(res)
 
     def permanent_delete(self, cluster_id: str):
         """Permanently delete cluster.
-        
+
         Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously
         removed.
-        
+
         In addition, users will no longer see permanently deleted clusters in the cluster list, and API users
         can no longer perform any action on permanently deleted clusters.
-        
+
         :param cluster_id: str
           The cluster to be deleted.
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/clusters/permanent-delete', body=body, headers=headers)
+        self._api.do(
+            "POST",
+            "/api/2.1/clusters/permanent-delete",
+            body=body,
+            headers=headers,
+        )
 
     def pin(self, cluster_id: str):
         """Pin cluster.
-        
+
         Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a
         cluster that is already pinned will have no effect. This API can only be called by workspace admins.
-        
+
         :param cluster_id: str
           
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/clusters/pin', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/clusters/pin", body=body, headers=headers)
 
-    def resize(self,
-               cluster_id: str,
-               *,
-               autoscale: Optional[AutoScale] = None,
-               num_workers: Optional[int] = None) -> Wait[ClusterDetails]:
+    def resize(
+        self,
+        cluster_id: str,
+        *,
+        autoscale: Optional[AutoScale] = None,
+        num_workers: Optional[int] = None,
+    ) -> Wait[ClusterDetails]:
         """Resize cluster.
-        
+
         Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a
         `RUNNING` state.
-        
+
         :param cluster_id: str
           The cluster to be resized.
         :param autoscale: :class:`AutoScale` (optional)
@@ -9427,160 +11061,201 @@ def resize(self,
         :param num_workers: int (optional)
           Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
           `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
+
           Note: When reading the properties of a cluster, this field reflects the desired number of workers
           rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
           workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
           the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
           provisioned.
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
-        if autoscale is not None: body['autoscale'] = autoscale.as_dict()
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if num_workers is not None: body['num_workers'] = num_workers
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/clusters/resize', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=ResizeClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
-
-    def resize_and_wait(self,
-                        cluster_id: str,
-                        *,
-                        autoscale: Optional[AutoScale] = None,
-                        num_workers: Optional[int] = None,
-                        timeout=timedelta(minutes=20)) -> ClusterDetails:
-        return self.resize(autoscale=autoscale, cluster_id=cluster_id,
-                           num_workers=num_workers).result(timeout=timeout)
+        if autoscale is not None:
+            body["autoscale"] = autoscale.as_dict()
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if num_workers is not None:
+            body["num_workers"] = num_workers
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=ResizeClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
+
+    def resize_and_wait(
+        self,
+        cluster_id: str,
+        *,
+        autoscale: Optional[AutoScale] = None,
+        num_workers: Optional[int] = None,
+        timeout=timedelta(minutes=20),
+    ) -> ClusterDetails:
+        return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout)
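A hedged sketch of the resize flow (placeholder cluster ID); `resize_and_wait` is simply the blocking convenience wrapper shown above.

from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Resize to a fixed worker count and block until the cluster reports RUNNING.
details = w.clusters.resize_and_wait(
    cluster_id="0123-456789-abcdefgh",  # placeholder
    num_workers=4,
    timeout=timedelta(minutes=30),
)
print(details.num_workers)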
 
     def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]:
         """Restart cluster.
-        
+
         Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state,
         nothing will happen.
-        
+
         :param cluster_id: str
           The cluster to be started.
         :param restart_user: str (optional)
           
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if restart_user is not None: body['restart_user'] = restart_user
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/clusters/restart', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=RestartClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
-
-    def restart_and_wait(self,
-                         cluster_id: str,
-                         *,
-                         restart_user: Optional[str] = None,
-                         timeout=timedelta(minutes=20)) -> ClusterDetails:
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if restart_user is not None:
+            body["restart_user"] = restart_user
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=RestartClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
+
+    def restart_and_wait(
+        self,
+        cluster_id: str,
+        *,
+        restart_user: Optional[str] = None,
+        timeout=timedelta(minutes=20),
+    ) -> ClusterDetails:
         return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout)
 
     def set_permissions(
-            self,
-            cluster_id: str,
-            *,
-            access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions:
+        self,
+        cluster_id: str,
+        *,
+        access_control_list: Optional[List[ClusterAccessControlRequest]] = None,
+    ) -> ClusterPermissions:
         """Set cluster permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param cluster_id: str
           The cluster for which to get or manage permissions.
         :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ClusterPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT', f'/api/2.0/permissions/clusters/{cluster_id}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/clusters/{cluster_id}",
+            body=body,
+            headers=headers,
+        )
         return ClusterPermissions.from_dict(res)
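A sketch of replacing a cluster's ACL with `set_permissions`, using the `ClusterAccessControlRequest` and `ClusterPermissionLevel` types from this module; the principal and cluster ID are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()
# set_permissions replaces all direct permissions on the cluster with the list given here.
perms = w.clusters.set_permissions(
    cluster_id="0123-456789-abcdefgh",  # placeholder
    access_control_list=[
        compute.ClusterAccessControlRequest(
            user_name="someone@example.com",  # placeholder principal
            permission_level=compute.ClusterPermissionLevel.CAN_RESTART,
        )
    ],
)
print([acl.user_name for acl in perms.access_control_list])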
 
     def spark_versions(self) -> GetSparkVersionsResponse:
         """List available Spark versions.
-        
+
         Returns the list of available Spark versions. These versions can be used to launch a cluster.
-        
+
         :returns: :class:`GetSparkVersionsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.1/clusters/spark-versions', headers=headers)
+        res = self._api.do("GET", "/api/2.1/clusters/spark-versions", headers=headers)
         return GetSparkVersionsResponse.from_dict(res)
 
     def start(self, cluster_id: str) -> Wait[ClusterDetails]:
         """Start terminated cluster.
-        
+
         Starts a terminated Spark cluster with the supplied ID. This works similarly to `createCluster` except:
-        
+
         * The previous cluster id and attributes are preserved. * The cluster starts with the last specified
         cluster size. * If the previous cluster was an autoscaling cluster, the current cluster starts with
         the minimum number of nodes. * If the cluster is not currently in a `TERMINATED` state, nothing will
         happen. * Clusters launched to run a job cannot be started.
-        
+
         :param cluster_id: str
           The cluster to be started.
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        op_response = self._api.do('POST', '/api/2.1/clusters/start', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=StartClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
+        op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=StartClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
 
     def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.start(cluster_id=cluster_id).result(timeout=timeout)
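A sketch of restarting a terminated cluster with the blocking helper above (placeholder cluster ID); the non-blocking `start` variant returns the `Wait` object instead.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Blocks (up to the default 20 minutes) until the cluster is RUNNING again.
details = w.clusters.start_and_wait(cluster_id="0123-456789-abcdefgh")  # placeholder ID
print(details.state)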
 
     def unpin(self, cluster_id: str):
         """Unpin cluster.
-        
+
         Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
         Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace
         admins.
-        
+
         :param cluster_id: str
           
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/clusters/unpin", body=body, headers=headers)
 
-    def update(self,
-               cluster_id: str,
-               update_mask: str,
-               *,
-               cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]:
+    def update(
+        self,
+        cluster_id: str,
+        update_mask: str,
+        *,
+        cluster: Optional[UpdateClusterResource] = None,
+    ) -> Wait[ClusterDetails]:
         """Update cluster configuration (partial).
-        
+
         Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
         fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
         in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
@@ -9589,7 +11264,7 @@ def update(self,
         is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
         rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
         updated.
-        
+
         :param cluster_id: str
           ID of the cluster.
         :param update_mask: str
@@ -9599,21 +11274,29 @@ def update(self,
           string but omit it from the `cluster` object.
         :param cluster: :class:`UpdateClusterResource` (optional)
           The cluster to be updated.
-        
+
         :returns:
           Long-running operation waiter for :class:`ClusterDetails`.
           See :method:wait_get_cluster_running for more details.
         """
         body = {}
-        if cluster is not None: body['cluster'] = cluster.as_dict()
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if update_mask is not None: body['update_mask'] = update_mask
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/clusters/update', body=body, headers=headers)
-        return Wait(self.wait_get_cluster_running,
-                    response=UpdateClusterResponse.from_dict(op_response),
-                    cluster_id=cluster_id)
+        if cluster is not None:
+            body["cluster"] = cluster.as_dict()
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if update_mask is not None:
+            body["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers)
+        return Wait(
+            self.wait_get_cluster_running,
+            response=UpdateClusterResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+        )
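A sketch of the partial update: only the fields named in `update_mask` are touched, so the `UpdateClusterResource` below (placeholder values) carries just an autotermination change.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()
# Only autotermination_minutes is updated; every other cluster attribute is left untouched.
details = w.clusters.update(
    cluster_id="0123-456789-abcdefgh",  # placeholder
    update_mask="autotermination_minutes",
    cluster=compute.UpdateClusterResource(autotermination_minutes=60),
).result()
print(details.autotermination_minutes)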
 
     def update_and_wait(
         self,
@@ -9621,57 +11304,72 @@ def update_and_wait(
         update_mask: str,
         *,
         cluster: Optional[UpdateClusterResource] = None,
-        timeout=timedelta(minutes=20)) -> ClusterDetails:
-        return self.update(cluster=cluster, cluster_id=cluster_id,
-                           update_mask=update_mask).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> ClusterDetails:
+        return self.update(cluster=cluster, cluster_id=cluster_id, update_mask=update_mask).result(timeout=timeout)
 
     def update_permissions(
-            self,
-            cluster_id: str,
-            *,
-            access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions:
+        self,
+        cluster_id: str,
+        *,
+        access_control_list: Optional[List[ClusterAccessControlRequest]] = None,
+    ) -> ClusterPermissions:
         """Update cluster permissions.
-        
+
         Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
-        
+
         :param cluster_id: str
           The cluster for which to get or manage permissions.
         :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ClusterPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/permissions/clusters/{cluster_id}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/clusters/{cluster_id}",
+            body=body,
+            headers=headers,
+        )
         return ClusterPermissions.from_dict(res)
 
 
 class CommandExecutionAPI:
     """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
-    only supports (classic) all-purpose clusters. Serverless compute is not supported."""
+    only supports (classic) all-purpose clusters. Serverless compute is not supported.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def wait_command_status_command_execution_cancelled(
-            self,
-            cluster_id: str,
-            command_id: str,
-            context_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
+        self,
+        cluster_id: str,
+        command_id: str,
+        context_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[CommandStatusResponse], None]] = None,
+    ) -> CommandStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (CommandStatus.CANCELLED, )
-        failure_states = (CommandStatus.ERROR, )
-        status_message = 'polling...'
+        target_states = (CommandStatus.CANCELLED,)
+        failure_states = (CommandStatus.ERROR,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
-            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
+            poll = self.command_status(
+                cluster_id=cluster_id,
+                command_id=command_id,
+                context_id=context_id,
+            )
             status = poll.status
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.results:
                 status_message = poll.results.cause
             if status in target_states:
@@ -9679,114 +11377,136 @@ def wait_command_status_command_execution_cancelled(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Cancelled, got {status}: {status_message}'
+                msg = f"failed to reach Cancelled, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_context_status_command_execution_running(
-            self,
-            cluster_id: str,
-            context_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse:
+        self,
+        cluster_id: str,
+        context_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[ContextStatusResponse], None]] = None,
+    ) -> ContextStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ContextStatus.RUNNING, )
-        failure_states = (ContextStatus.ERROR, )
-        status_message = 'polling...'
+        target_states = (ContextStatus.RUNNING,)
+        failure_states = (ContextStatus.ERROR,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.context_status(cluster_id=cluster_id, context_id=context_id)
             status = poll.status
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Running, got {status}: {status_message}'
+                msg = f"failed to reach Running, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"cluster_id={cluster_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_command_status_command_execution_finished_or_error(
-            self,
-            cluster_id: str,
-            command_id: str,
-            context_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
+        self,
+        cluster_id: str,
+        command_id: str,
+        context_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[CommandStatusResponse], None]] = None,
+    ) -> CommandStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, )
-        failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, )
-        status_message = 'polling...'
+        target_states = (
+            CommandStatus.FINISHED,
+            CommandStatus.ERROR,
+        )
+        failure_states = (
+            CommandStatus.CANCELLED,
+            CommandStatus.CANCELLING,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
-            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
+            poll = self.command_status(
+                cluster_id=cluster_id,
+                command_id=command_id,
+                context_id=context_id,
+            )
             status = poll.status
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Finished or Error, got {status}: {status_message}'
+                msg = f"failed to reach Finished or Error, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
-    def cancel(self,
-               *,
-               cluster_id: Optional[str] = None,
-               command_id: Optional[str] = None,
-               context_id: Optional[str] = None) -> Wait[CommandStatusResponse]:
+    def cancel(
+        self,
+        *,
+        cluster_id: Optional[str] = None,
+        command_id: Optional[str] = None,
+        context_id: Optional[str] = None,
+    ) -> Wait[CommandStatusResponse]:
         """Cancel a command.
-        
+
         Cancels a currently running command within an execution context.
-        
+
         The command ID is obtained from a prior successful call to __execute__.
-        
+
         :param cluster_id: str (optional)
         :param command_id: str (optional)
         :param context_id: str (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`CommandStatusResponse`.
           See :method:wait_command_status_command_execution_cancelled for more details.
         """
         body = {}
-        if cluster_id is not None: body['clusterId'] = cluster_id
-        if command_id is not None: body['commandId'] = command_id
-        if context_id is not None: body['contextId'] = context_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/1.2/commands/cancel', body=body, headers=headers)
-        return Wait(self.wait_command_status_command_execution_cancelled,
-                    response=CancelResponse.from_dict(op_response),
-                    cluster_id=cluster_id,
-                    command_id=command_id,
-                    context_id=context_id)
+        if cluster_id is not None:
+            body["clusterId"] = cluster_id
+        if command_id is not None:
+            body["commandId"] = command_id
+        if context_id is not None:
+            body["contextId"] = context_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers)
+        return Wait(
+            self.wait_command_status_command_execution_cancelled,
+            response=CancelResponse.from_dict(op_response),
+            cluster_id=cluster_id,
+            command_id=command_id,
+            context_id=context_id,
+        )
 
     def cancel_and_wait(
         self,
@@ -9794,118 +11514,144 @@ def cancel_and_wait(
         cluster_id: Optional[str] = None,
         command_id: Optional[str] = None,
         context_id: Optional[str] = None,
-        timeout=timedelta(minutes=20)) -> CommandStatusResponse:
-        return self.cancel(cluster_id=cluster_id, command_id=command_id,
-                           context_id=context_id).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> CommandStatusResponse:
+        return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout)
 
     def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse:
         """Get command info.
-        
+
         Gets the status of and, if available, the results from a currently executing command.
-        
+
         The command ID is obtained from a prior successful call to __execute__.
-        
+
         :param cluster_id: str
         :param context_id: str
         :param command_id: str
-        
+
         :returns: :class:`CommandStatusResponse`
         """
 
         query = {}
-        if cluster_id is not None: query['clusterId'] = cluster_id
-        if command_id is not None: query['commandId'] = command_id
-        if context_id is not None: query['contextId'] = context_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/1.2/commands/status', query=query, headers=headers)
+        if cluster_id is not None:
+            query["clusterId"] = cluster_id
+        if command_id is not None:
+            query["commandId"] = command_id
+        if context_id is not None:
+            query["contextId"] = context_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/1.2/commands/status", query=query, headers=headers)
         return CommandStatusResponse.from_dict(res)
 
     def context_status(self, cluster_id: str, context_id: str) -> ContextStatusResponse:
         """Get status.
-        
+
         Gets the status for an execution context.
-        
+
         :param cluster_id: str
         :param context_id: str
-        
+
         :returns: :class:`ContextStatusResponse`
         """
 
         query = {}
-        if cluster_id is not None: query['clusterId'] = cluster_id
-        if context_id is not None: query['contextId'] = context_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/1.2/contexts/status', query=query, headers=headers)
+        if cluster_id is not None:
+            query["clusterId"] = cluster_id
+        if context_id is not None:
+            query["contextId"] = context_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/1.2/contexts/status", query=query, headers=headers)
         return ContextStatusResponse.from_dict(res)
 
-    def create(self,
-               *,
-               cluster_id: Optional[str] = None,
-               language: Optional[Language] = None) -> Wait[ContextStatusResponse]:
+    def create(
+        self,
+        *,
+        cluster_id: Optional[str] = None,
+        language: Optional[Language] = None,
+    ) -> Wait[ContextStatusResponse]:
         """Create an execution context.
-        
+
         Creates an execution context for running cluster commands.
-        
+
         If successful, this method returns the ID of the new execution context.
-        
+
         :param cluster_id: str (optional)
           Running cluster id
         :param language: :class:`Language` (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`ContextStatusResponse`.
           See :method:wait_context_status_command_execution_running for more details.
         """
         body = {}
-        if cluster_id is not None: body['clusterId'] = cluster_id
-        if language is not None: body['language'] = language.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/1.2/contexts/create', body=body, headers=headers)
-        return Wait(self.wait_context_status_command_execution_running,
-                    response=Created.from_dict(op_response),
-                    cluster_id=cluster_id,
-                    context_id=op_response['id'])
+        if cluster_id is not None:
+            body["clusterId"] = cluster_id
+        if language is not None:
+            body["language"] = language.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/1.2/contexts/create", body=body, headers=headers)
+        return Wait(
+            self.wait_context_status_command_execution_running,
+            response=Created.from_dict(op_response),
+            cluster_id=cluster_id,
+            context_id=op_response["id"],
+        )
 
     def create_and_wait(
         self,
         *,
         cluster_id: Optional[str] = None,
         language: Optional[Language] = None,
-        timeout=timedelta(minutes=20)) -> ContextStatusResponse:
+        timeout=timedelta(minutes=20),
+    ) -> ContextStatusResponse:
         return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout)
 
     def destroy(self, cluster_id: str, context_id: str):
         """Delete an execution context.
-        
+
         Deletes an execution context.
-        
+
         :param cluster_id: str
         :param context_id: str
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['clusterId'] = cluster_id
-        if context_id is not None: body['contextId'] = context_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["clusterId"] = cluster_id
+        if context_id is not None:
+            body["contextId"] = context_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/1.2/contexts/destroy', body=body, headers=headers)
+        self._api.do("POST", "/api/1.2/contexts/destroy", body=body, headers=headers)
 
-    def execute(self,
-                *,
-                cluster_id: Optional[str] = None,
-                command: Optional[str] = None,
-                context_id: Optional[str] = None,
-                language: Optional[Language] = None) -> Wait[CommandStatusResponse]:
+    def execute(
+        self,
+        *,
+        cluster_id: Optional[str] = None,
+        command: Optional[str] = None,
+        context_id: Optional[str] = None,
+        language: Optional[Language] = None,
+    ) -> Wait[CommandStatusResponse]:
         """Run a command.
-        
+
         Runs a cluster command in the given execution context, using the provided language.
-        
+
         If successful, it returns an ID for tracking the status of the command's execution.
-        
+
         :param cluster_id: str (optional)
           Running cluster id
         :param command: str (optional)
@@ -9913,24 +11659,33 @@ def execute(self,
         :param context_id: str (optional)
           Running context id
         :param language: :class:`Language` (optional)
-        
+
         :returns:
           Long-running operation waiter for :class:`CommandStatusResponse`.
           See :method:wait_command_status_command_execution_finished_or_error for more details.
         """
         body = {}
-        if cluster_id is not None: body['clusterId'] = cluster_id
-        if command is not None: body['command'] = command
-        if context_id is not None: body['contextId'] = context_id
-        if language is not None: body['language'] = language.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/1.2/commands/execute', body=body, headers=headers)
-        return Wait(self.wait_command_status_command_execution_finished_or_error,
-                    response=Created.from_dict(op_response),
-                    cluster_id=cluster_id,
-                    command_id=op_response['id'],
-                    context_id=context_id)
+        if cluster_id is not None:
+            body["clusterId"] = cluster_id
+        if command is not None:
+            body["command"] = command
+        if context_id is not None:
+            body["contextId"] = context_id
+        if language is not None:
+            body["language"] = language.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/1.2/commands/execute", body=body, headers=headers)
+        return Wait(
+            self.wait_command_status_command_execution_finished_or_error,
+            response=Created.from_dict(op_response),
+            cluster_id=cluster_id,
+            command_id=op_response["id"],
+            context_id=context_id,
+        )
 
     def execute_and_wait(
         self,
@@ -9939,15 +11694,20 @@ def execute_and_wait(
         command: Optional[str] = None,
         context_id: Optional[str] = None,
         language: Optional[Language] = None,
-        timeout=timedelta(minutes=20)) -> CommandStatusResponse:
-        return self.execute(cluster_id=cluster_id, command=command, context_id=context_id,
-                            language=language).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> CommandStatusResponse:
+        return self.execute(
+            cluster_id=cluster_id,
+            command=command,
+            context_id=context_id,
+            language=language,
+        ).result(timeout=timeout)
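
As a usage note for reviewers of this hunk, here is a minimal sketch of how the reformatted waiter methods fit together, assuming a WorkspaceClient authenticated from the environment and a running cluster id exported as CLUSTER_ID (both are assumptions, not part of this patch):

import os

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()  # assumes credentials come from the environment or ~/.databrickscfg
cluster_id = os.environ["CLUSTER_ID"]  # placeholder: id of an already running cluster

# Create an execution context and block until it is usable.
ctx = w.command_execution.create_and_wait(cluster_id=cluster_id, language=compute.Language.PYTHON)

# Run a command in that context and wait for it to finish or fail.
result = w.command_execution.execute_and_wait(
    cluster_id=cluster_id,
    context_id=ctx.id,
    language=compute.Language.PYTHON,
    command="print(1 + 1)",
)
print(result.status)

# Tear the context down once the command has completed.
w.command_execution.destroy(cluster_id=cluster_id, context_id=ctx.id)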
 
 
 class GlobalInitScriptsAPI:
     """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts
     for their workspace. These scripts run on every node in every cluster in the workspace.
-    
+
     **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts.
     Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark
     container fails to launch and init scripts with later position are skipped. If enough containers fail, the
@@ -9956,16 +11716,18 @@ class GlobalInitScriptsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               script: str,
-               *,
-               enabled: Optional[bool] = None,
-               position: Optional[int] = None) -> CreateResponse:
+    def create(
+        self,
+        name: str,
+        script: str,
+        *,
+        enabled: Optional[bool] = None,
+        position: Optional[int] = None,
+    ) -> CreateResponse:
         """Create init script.
-        
+
         Creates a new global init script in this workspace.
-        
+
         :param name: str
           The name of the script
         :param script: str
@@ -9975,85 +11737,102 @@ def create(self,
         :param position: int (optional)
           The position of a global init script, where 0 represents the first script to run, 1 is the second
           script to run, in ascending order.
-          
+
           If you omit the numeric position for a new global init script, it defaults to last position. It will
           run after all current scripts. Setting any value greater than the position of the last script is
           equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2.
           Any position of (3) or greater puts the script in the last position. If an explicit position value
           conflicts with an existing script value, your request succeeds, but the original script at that
           position and all later scripts have their positions incremented by 1.
-        
+
         :returns: :class:`CreateResponse`
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if name is not None: body['name'] = name
-        if position is not None: body['position'] = position
-        if script is not None: body['script'] = script
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/global-init-scripts', body=body, headers=headers)
+        if enabled is not None:
+            body["enabled"] = enabled
+        if name is not None:
+            body["name"] = name
+        if position is not None:
+            body["position"] = position
+        if script is not None:
+            body["script"] = script
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/global-init-scripts", body=body, headers=headers)
         return CreateResponse.from_dict(res)
 
     def delete(self, script_id: str):
         """Delete init script.
-        
+
         Deletes a global init script.
-        
+
         :param script_id: str
           The ID of the global init script.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/global-init-scripts/{script_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/global-init-scripts/{script_id}",
+            headers=headers,
+        )
 
     def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent:
         """Get an init script.
-        
+
         Gets all the details of a script, including its Base64-encoded contents.
-        
+
         :param script_id: str
           The ID of the global init script.
-        
+
         :returns: :class:`GlobalInitScriptDetailsWithContent`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/global-init-scripts/{script_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/global-init-scripts/{script_id}", headers=headers)
         return GlobalInitScriptDetailsWithContent.from_dict(res)
 
     def list(self) -> Iterator[GlobalInitScriptDetails]:
         """Get init scripts.
-        
+
         Get a list of all global init scripts for this workspace. This returns all properties for each script
         but **not** the script contents. To retrieve the contents of a script, use the [get a global init
         script](:method:globalinitscripts/get) operation.
-        
+
         :returns: Iterator over :class:`GlobalInitScriptDetails`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/global-init-scripts', headers=headers)
+        json = self._api.do("GET", "/api/2.0/global-init-scripts", headers=headers)
         parsed = ListGlobalInitScriptsResponse.from_dict(json).scripts
         return parsed if parsed is not None else []
 
-    def update(self,
-               script_id: str,
-               name: str,
-               script: str,
-               *,
-               enabled: Optional[bool] = None,
-               position: Optional[int] = None):
+    def update(
+        self,
+        script_id: str,
+        name: str,
+        script: str,
+        *,
+        enabled: Optional[bool] = None,
+        position: Optional[int] = None,
+    ):
         """Update init script.
-        
+
         Updates a global init script, specifying only the fields to change. All fields are optional.
         Unspecified fields retain their current value.
-        
+
         :param script_id: str
           The ID of the global init script.
         :param name: str
@@ -10065,64 +11844,77 @@ def update(self,
         :param position: int (optional)
           The position of a script, where 0 represents the first script to run, 1 is the second script to run,
           in ascending order. To move the script to run first, set its position to 0.
-          
+
           To move the script to the end, set its position to any value greater than or equal to the position
           of the last script. Example: three existing scripts with positions 0, 1, and 2. Any position value of 2 or
           greater puts the script in the last position (2).
-          
+
           If an explicit position value conflicts with an existing script, your request succeeds, but the
           original script at that position and all later scripts have their positions incremented by 1.
-        
-        
+
+
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if name is not None: body['name'] = name
-        if position is not None: body['position'] = position
-        if script is not None: body['script'] = script
-        headers = {'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', f'/api/2.0/global-init-scripts/{script_id}', body=body, headers=headers)
+        if enabled is not None:
+            body["enabled"] = enabled
+        if name is not None:
+            body["name"] = name
+        if position is not None:
+            body["position"] = position
+        if script is not None:
+            body["script"] = script
+        headers = {
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/global-init-scripts/{script_id}",
+            body=body,
+            headers=headers,
+        )
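
A small sketch of how the create/list/update methods above are typically called; the shell script content is an example and, per the get() docstring, script contents are stored Base64-encoded:

import base64

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The API stores script contents Base64-encoded, so encode the shell source first.
script_b64 = base64.b64encode(b"#!/bin/bash\necho init >> /tmp/init.log\n").decode()

created = w.global_init_scripts.create(name="example-init", script=script_b64, enabled=True, position=0)

# list() returns metadata only; use get() to retrieve the Base64-encoded contents.
for details in w.global_init_scripts.list():
    print(details.script_id, details.name, details.enabled)

# Disable the script without changing its contents or position.
w.global_init_scripts.update(script_id=created.script_id, name="example-init", script=script_b64, enabled=False)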
 
 
 class InstancePoolsAPI:
     """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud
     instances which reduces a cluster start and auto-scaling times.
-    
+
     Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
     instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
     instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
     instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
     returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
     pool’s idle instances.
-    
+
     You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
-    
+
     Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
     apply. See pricing."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               instance_pool_name: str,
-               node_type_id: str,
-               *,
-               aws_attributes: Optional[InstancePoolAwsAttributes] = None,
-               azure_attributes: Optional[InstancePoolAzureAttributes] = None,
-               custom_tags: Optional[Dict[str, str]] = None,
-               disk_spec: Optional[DiskSpec] = None,
-               enable_elastic_disk: Optional[bool] = None,
-               gcp_attributes: Optional[InstancePoolGcpAttributes] = None,
-               idle_instance_autotermination_minutes: Optional[int] = None,
-               max_capacity: Optional[int] = None,
-               min_idle_instances: Optional[int] = None,
-               preloaded_docker_images: Optional[List[DockerImage]] = None,
-               preloaded_spark_versions: Optional[List[str]] = None) -> CreateInstancePoolResponse:
+    def create(
+        self,
+        instance_pool_name: str,
+        node_type_id: str,
+        *,
+        aws_attributes: Optional[InstancePoolAwsAttributes] = None,
+        azure_attributes: Optional[InstancePoolAzureAttributes] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        disk_spec: Optional[DiskSpec] = None,
+        enable_elastic_disk: Optional[bool] = None,
+        gcp_attributes: Optional[InstancePoolGcpAttributes] = None,
+        idle_instance_autotermination_minutes: Optional[int] = None,
+        max_capacity: Optional[int] = None,
+        min_idle_instances: Optional[int] = None,
+        preloaded_docker_images: Optional[List[DockerImage]] = None,
+        preloaded_spark_versions: Optional[List[str]] = None,
+    ) -> CreateInstancePoolResponse:
         """Create a new instance pool.
-        
+
         Creates a new instance pool using idle and ready-to-use cloud instances.
-        
+
         :param instance_pool_name: str
           Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
           characters.
@@ -10140,7 +11932,7 @@ def create(self,
         :param custom_tags: Dict[str,str] (optional)
           Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
           EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
+
           - Currently, Databricks allows at most 45 custom tags
         :param disk_spec: :class:`DiskSpec` (optional)
           Defines the specification of the disks that will be attached to all spark containers.
@@ -10169,60 +11961,89 @@ def create(self,
           A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
           started with the preloaded Spark version will start faster. A list of available Spark versions can
           be retrieved by using the :method:clusters/sparkVersions API call.
-        
+
         :returns: :class:`CreateInstancePoolResponse`
         """
         body = {}
-        if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict()
-        if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict()
-        if custom_tags is not None: body['custom_tags'] = custom_tags
-        if disk_spec is not None: body['disk_spec'] = disk_spec.as_dict()
-        if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk
-        if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
+        if aws_attributes is not None:
+            body["aws_attributes"] = aws_attributes.as_dict()
+        if azure_attributes is not None:
+            body["azure_attributes"] = azure_attributes.as_dict()
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
+        if disk_spec is not None:
+            body["disk_spec"] = disk_spec.as_dict()
+        if enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = enable_elastic_disk
+        if gcp_attributes is not None:
+            body["gcp_attributes"] = gcp_attributes.as_dict()
         if idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes
-        if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name
-        if max_capacity is not None: body['max_capacity'] = max_capacity
-        if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances
-        if node_type_id is not None: body['node_type_id'] = node_type_id
+            body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes
+        if instance_pool_name is not None:
+            body["instance_pool_name"] = instance_pool_name
+        if max_capacity is not None:
+            body["max_capacity"] = max_capacity
+        if min_idle_instances is not None:
+            body["min_idle_instances"] = min_idle_instances
+        if node_type_id is not None:
+            body["node_type_id"] = node_type_id
         if preloaded_docker_images is not None:
-            body['preloaded_docker_images'] = [v.as_dict() for v in preloaded_docker_images]
+            body["preloaded_docker_images"] = [v.as_dict() for v in preloaded_docker_images]
         if preloaded_spark_versions is not None:
-            body['preloaded_spark_versions'] = [v for v in preloaded_spark_versions]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/instance-pools/create', body=body, headers=headers)
+            body["preloaded_spark_versions"] = [v for v in preloaded_spark_versions]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/instance-pools/create",
+            body=body,
+            headers=headers,
+        )
         return CreateInstancePoolResponse.from_dict(res)
 
     def delete(self, instance_pool_id: str):
         """Delete an instance pool.
-        
+
         Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.
-        
+
         :param instance_pool_id: str
           The instance pool to be terminated.
-        
-        
-        """
-        body = {}
-        if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/instance-pools/delete', body=body, headers=headers)
 
-    def edit(self,
-             instance_pool_id: str,
-             instance_pool_name: str,
-             node_type_id: str,
-             *,
-             custom_tags: Optional[Dict[str, str]] = None,
-             idle_instance_autotermination_minutes: Optional[int] = None,
-             max_capacity: Optional[int] = None,
-             min_idle_instances: Optional[int] = None):
+        """
+        body = {}
+        if instance_pool_id is not None:
+            body["instance_pool_id"] = instance_pool_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/instance-pools/delete",
+            body=body,
+            headers=headers,
+        )
+
+    def edit(
+        self,
+        instance_pool_id: str,
+        instance_pool_name: str,
+        node_type_id: str,
+        *,
+        custom_tags: Optional[Dict[str, str]] = None,
+        idle_instance_autotermination_minutes: Optional[int] = None,
+        max_capacity: Optional[int] = None,
+        min_idle_instances: Optional[int] = None,
+    ):
         """Edit an existing instance pool.
-        
+
         Modifies the configuration of an existing instance pool.
-        
+
         :param instance_pool_id: str
           Instance pool ID
         :param instance_pool_name: str
@@ -10236,7 +12057,7 @@ def edit(self,
         :param custom_tags: Dict[str,str] (optional)
           Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
           EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
+
           - Currently, Databricks allows at most 45 custom tags
         :param idle_instance_autotermination_minutes: int (optional)
           Automatically terminates the extra instances in the pool cache after they are inactive for this time
@@ -10250,86 +12071,110 @@ def edit(self,
           upsize requests.
         :param min_idle_instances: int (optional)
           Minimum number of idle instances to keep in the instance pool
-        
-        
+
+
         """
         body = {}
-        if custom_tags is not None: body['custom_tags'] = custom_tags
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
         if idle_instance_autotermination_minutes is not None:
-            body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes
-        if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
-        if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name
-        if max_capacity is not None: body['max_capacity'] = max_capacity
-        if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances
-        if node_type_id is not None: body['node_type_id'] = node_type_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/instance-pools/edit', body=body, headers=headers)
+            body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes
+        if instance_pool_id is not None:
+            body["instance_pool_id"] = instance_pool_id
+        if instance_pool_name is not None:
+            body["instance_pool_name"] = instance_pool_name
+        if max_capacity is not None:
+            body["max_capacity"] = max_capacity
+        if min_idle_instances is not None:
+            body["min_idle_instances"] = min_idle_instances
+        if node_type_id is not None:
+            body["node_type_id"] = node_type_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/instance-pools/edit", body=body, headers=headers)
 
     def get(self, instance_pool_id: str) -> GetInstancePool:
         """Get instance pool information.
-        
+
         Retrieve the information for an instance pool based on its identifier.
-        
+
         :param instance_pool_id: str
           The canonical unique identifier for the instance pool.
-        
+
         :returns: :class:`GetInstancePool`
         """
 
         query = {}
-        if instance_pool_id is not None: query['instance_pool_id'] = instance_pool_id
-        headers = {'Accept': 'application/json', }
+        if instance_pool_id is not None:
+            query["instance_pool_id"] = instance_pool_id
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/instance-pools/get', query=query, headers=headers)
+        res = self._api.do("GET", "/api/2.0/instance-pools/get", query=query, headers=headers)
         return GetInstancePool.from_dict(res)
 
     def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse:
         """Get instance pool permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
-        
+
         :returns: :class:`GetInstancePoolPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels",
+            headers=headers,
+        )
         return GetInstancePoolPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions:
         """Get instance pool permissions.
-        
+
         Gets the permissions of an instance pool. Instance pools can inherit permissions from their root
         object.
-        
+
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
-        
+
         :returns: :class:`InstancePoolPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/instance-pools/{instance_pool_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/instance-pools/{instance_pool_id}",
+            headers=headers,
+        )
         return InstancePoolPermissions.from_dict(res)
 
     def list(self) -> Iterator[InstancePoolAndStats]:
         """List instance pool info.
-        
+
         Gets a list of instance pools with their statistics.
-        
+
         :returns: Iterator over :class:`InstancePoolAndStats`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/instance-pools/list', headers=headers)
+        json = self._api.do("GET", "/api/2.0/instance-pools/list", headers=headers)
         parsed = ListInstancePools.from_dict(json).instance_pools
         return parsed if parsed is not None else []
 
@@ -10337,56 +12182,66 @@ def set_permissions(
         self,
         instance_pool_id: str,
         *,
-        access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None
+        access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None,
     ) -> InstancePoolPermissions:
         """Set instance pool permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
         :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-        
+
         :returns: :class:`InstancePoolPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/instance-pools/{instance_pool_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/instance-pools/{instance_pool_id}",
+            body=body,
+            headers=headers,
+        )
         return InstancePoolPermissions.from_dict(res)
 
     def update_permissions(
         self,
         instance_pool_id: str,
         *,
-        access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None
+        access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None,
     ) -> InstancePoolPermissions:
         """Update instance pool permissions.
-        
+
         Updates the permissions on an instance pool. Instance pools can inherit permissions from their root
         object.
-        
+
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
         :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-        
+
         :returns: :class:`InstancePoolPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/instance-pools/{instance_pool_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/instance-pools/{instance_pool_id}",
+            body=body,
+            headers=headers,
+        )
         return InstancePoolPermissions.from_dict(res)
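
For context, a hedged sketch of creating, inspecting, and deleting a small pool with the methods above; select_node_type comes from the SDK's clusters mixin, and the pool sizing values are arbitrary examples:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Pick a cloud-appropriate node type rather than hard-coding one.
node_type = w.clusters.select_node_type(local_disk=True)

pool = w.instance_pools.create(
    instance_pool_name="example-pool",
    node_type_id=node_type,
    min_idle_instances=0,
    max_capacity=2,
    idle_instance_autotermination_minutes=15,
)

info = w.instance_pools.get(instance_pool_id=pool.instance_pool_id)
print(info.instance_pool_name, info.state)

# Idle instances are billed by the cloud provider, so delete the pool when finished.
w.instance_pools.delete(instance_pool_id=pool.instance_pool_id)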
 
 
@@ -10394,32 +12249,35 @@ class InstanceProfilesAPI:
     """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch
     clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3
     buckets] using instance profiles for more information.
-    
-    [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html"""
+
+    [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def add(self,
-            instance_profile_arn: str,
-            *,
-            iam_role_arn: Optional[str] = None,
-            is_meta_instance_profile: Optional[bool] = None,
-            skip_validation: Optional[bool] = None):
+    def add(
+        self,
+        instance_profile_arn: str,
+        *,
+        iam_role_arn: Optional[str] = None,
+        is_meta_instance_profile: Optional[bool] = None,
+        skip_validation: Optional[bool] = None,
+    ):
         """Register an instance profile.
-        
+
         In the UI, you can select the instance profile when launching clusters. This API is only available to
         admin users.
-        
+
         :param instance_profile_arn: str
           The AWS ARN of the instance profile to register with Databricks. This field is required.
         :param iam_role_arn: str (optional)
           The AWS IAM role ARN of the role associated with the instance profile. This field is required if
           your role name and instance profile name do not match and you want to use the instance profile with
           [Databricks SQL Serverless].
-          
+
           Otherwise, this field is optional.
-          
+
           [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
         :param is_meta_instance_profile: bool (optional)
           Boolean flag indicating whether the instance profile should only be used in credential passthrough
@@ -10432,276 +12290,350 @@ def add(self,
           fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your
           requested instance type is not supported in your requested availability zone”), you can pass this
           flag to skip the validation and forcibly add the instance profile.
-        
-        
-        """
-        body = {}
-        if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile
-        if skip_validation is not None: body['skip_validation'] = skip_validation
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/instance-profiles/add', body=body, headers=headers)
 
-    def edit(self,
-             instance_profile_arn: str,
-             *,
-             iam_role_arn: Optional[str] = None,
-             is_meta_instance_profile: Optional[bool] = None):
+        """
+        body = {}
+        if iam_role_arn is not None:
+            body["iam_role_arn"] = iam_role_arn
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        if is_meta_instance_profile is not None:
+            body["is_meta_instance_profile"] = is_meta_instance_profile
+        if skip_validation is not None:
+            body["skip_validation"] = skip_validation
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/instance-profiles/add",
+            body=body,
+            headers=headers,
+        )
+
+    def edit(
+        self,
+        instance_profile_arn: str,
+        *,
+        iam_role_arn: Optional[str] = None,
+        is_meta_instance_profile: Optional[bool] = None,
+    ):
         """Edit an instance profile.
-        
+
         The only supported field to change is the optional IAM role ARN associated with the instance profile.
         It is required to specify the IAM role ARN if both of the following are true:
-        
+
         * Your role name and instance profile name do not match. The name is the part after the last slash in
         each ARN. * You want to use the instance profile with [Databricks SQL Serverless].
-        
+
         To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses].
-        
+
         This API is only available to admin users.
-        
+
         [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
         [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html
-        
+
         :param instance_profile_arn: str
           The AWS ARN of the instance profile to register with Databricks. This field is required.
         :param iam_role_arn: str (optional)
           The AWS IAM role ARN of the role associated with the instance profile. This field is required if
           your role name and instance profile name do not match and you want to use the instance profile with
           [Databricks SQL Serverless].
-          
+
           Otherwise, this field is optional.
-          
+
           [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
         :param is_meta_instance_profile: bool (optional)
           Boolean flag indicating whether the instance profile should only be used in credential passthrough
          scenarios. If true, it means the instance profile contains a meta IAM role which could assume a
           wide range of roles. Therefore it should always be used with authorization. This field is optional,
           the default value is `false`.
-        
-        
+
+
         """
         body = {}
-        if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/instance-profiles/edit', body=body, headers=headers)
+        if iam_role_arn is not None:
+            body["iam_role_arn"] = iam_role_arn
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        if is_meta_instance_profile is not None:
+            body["is_meta_instance_profile"] = is_meta_instance_profile
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/instance-profiles/edit",
+            body=body,
+            headers=headers,
+        )
 
     def list(self) -> Iterator[InstanceProfile]:
         """List available instance profiles.
-        
+
         List the instance profiles that the calling user can use to launch a cluster.
-        
+
         This API is available to all users.
-        
+
         :returns: Iterator over :class:`InstanceProfile`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/instance-profiles/list', headers=headers)
+        json = self._api.do("GET", "/api/2.0/instance-profiles/list", headers=headers)
         parsed = ListInstanceProfilesResponse.from_dict(json).instance_profiles
         return parsed if parsed is not None else []
 
     def remove(self, instance_profile_arn: str):
         """Remove the instance profile.
-        
+
         Remove the instance profile with the provided ARN. Existing clusters with this instance profile will
         continue to function.
-        
+
         This API is only accessible to admin users.
-        
+
         :param instance_profile_arn: str
           The ARN of the instance profile to remove. This field is required.
-        
-        
+
+
         """
         body = {}
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/instance-profiles/remove', body=body, headers=headers)
+        self._api.do(
+            "POST",
+            "/api/2.0/instance-profiles/remove",
+            body=body,
+            headers=headers,
+        )
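
An illustrative, admin-only sketch of the add/list/remove flow above; the ARN is a placeholder and must be replaced with a real instance profile from your AWS account:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Placeholder ARN; substitute an instance profile that exists in your AWS account.
arn = "arn:aws:iam::123456789012:instance-profile/example-profile"

w.instance_profiles.add(instance_profile_arn=arn, skip_validation=False)

for profile in w.instance_profiles.list():
    print(profile.instance_profile_arn, profile.is_meta_instance_profile)

# Removing the registration does not affect clusters already using the profile.
w.instance_profiles.remove(instance_profile_arn=arn)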
 
 
 class LibrariesAPI:
     """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a
     cluster.
-    
+
     To make third-party or custom code available to notebooks and jobs running on your clusters, you can
     install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
     Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
-    
+
     Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
     directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
     or using an init script.
-    
+
     When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
-    Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart."""
+    Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def all_cluster_statuses(self) -> Iterator[ClusterLibraryStatuses]:
         """Get all statuses.
-        
+
         Get the status of all libraries on all clusters. A status is returned for all libraries installed on
         this cluster via the API or the libraries UI.
-        
+
         :returns: Iterator over :class:`ClusterLibraryStatuses`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/libraries/all-cluster-statuses', headers=headers)
+        json = self._api.do("GET", "/api/2.0/libraries/all-cluster-statuses", headers=headers)
         parsed = ListAllClusterLibraryStatusesResponse.from_dict(json).statuses
         return parsed if parsed is not None else []
 
     def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]:
         """Get status.
-        
+
         Get the status of libraries on a cluster. A status is returned for all libraries installed on this
         cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
         set to be installed on this cluster, in the order that the libraries were added to the cluster, are
         returned first. 2. Libraries that were previously requested to be installed on this cluster but
         are now marked for removal, in no particular order, are returned last.
-        
+
         :param cluster_id: str
           Unique identifier of the cluster whose status should be retrieved.
-        
+
         :returns: Iterator over :class:`LibraryFullStatus`
         """
 
         query = {}
-        if cluster_id is not None: query['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET', '/api/2.0/libraries/cluster-status', query=query, headers=headers)
+        if cluster_id is not None:
+            query["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do(
+            "GET",
+            "/api/2.0/libraries/cluster-status",
+            query=query,
+            headers=headers,
+        )
         parsed = ClusterLibraryStatuses.from_dict(json).library_statuses
         return parsed if parsed is not None else []
 
     def install(self, cluster_id: str, libraries: List[Library]):
         """Add a library.
-        
+
         Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
         after the completion of this request.
-        
+
         :param cluster_id: str
           Unique identifier for the cluster on which to install these libraries.
         :param libraries: List[:class:`Library`]
           The libraries to install.
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/libraries/install', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers)
 
     def uninstall(self, cluster_id: str, libraries: List[Library]):
         """Uninstall libraries.
-        
+
         Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
         restarted. A request to uninstall a library that is not currently installed is ignored.
-        
+
         :param cluster_id: str
           Unique identifier for the cluster on which to uninstall these libraries.
         :param libraries: List[:class:`Library`]
           The libraries to uninstall.
-        
-        
+
+
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/libraries/uninstall', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers)
 
 
 class PolicyComplianceForClustersAPI:
     """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your
     workspace.
-    
+
     A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
     be out of compliance if their policy was updated after the cluster was last edited.
-    
+
     The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
-    compliance API allows you to update a cluster to be compliant with the current version of its policy."""
+    compliance API allows you to update a cluster to be compliant with the current version of its policy.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def enforce_compliance(self,
-                           cluster_id: str,
-                           *,
-                           validate_only: Optional[bool] = None) -> EnforceClusterComplianceResponse:
+    def enforce_compliance(
+        self, cluster_id: str, *, validate_only: Optional[bool] = None
+    ) -> EnforceClusterComplianceResponse:
         """Enforce cluster policy compliance.
-        
+
         Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
         it is in a `RUNNING` or `TERMINATED` state.
-        
+
         If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
         can take effect.
-        
+
         If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
         cluster is started, the new attributes will take effect.
-        
+
         Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
         Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
-        
+
         :param cluster_id: str
           The ID of the cluster you want to enforce policy compliance on.
         :param validate_only: bool (optional)
           If set, previews the changes that would be made to a cluster to enforce compliance but does not
           update the cluster.
-        
+
         :returns: :class:`EnforceClusterComplianceResponse`
         """
         body = {}
-        if cluster_id is not None: body['cluster_id'] = cluster_id
-        if validate_only is not None: body['validate_only'] = validate_only
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           '/api/2.0/policies/clusters/enforce-compliance',
-                           body=body,
-                           headers=headers)
+        if cluster_id is not None:
+            body["cluster_id"] = cluster_id
+        if validate_only is not None:
+            body["validate_only"] = validate_only
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/policies/clusters/enforce-compliance",
+            body=body,
+            headers=headers,
+        )
         return EnforceClusterComplianceResponse.from_dict(res)
 
     def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse:
         """Get cluster policy compliance.
-        
+
         Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
         was updated after the cluster was last edited.
-        
+
         :param cluster_id: str
           The ID of the cluster to get the compliance status for.
-        
+
         :returns: :class:`GetClusterComplianceResponse`
         """
 
         query = {}
-        if cluster_id is not None: query['cluster_id'] = cluster_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/policies/clusters/get-compliance', query=query, headers=headers)
+        if cluster_id is not None:
+            query["cluster_id"] = cluster_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/policies/clusters/get-compliance",
+            query=query,
+            headers=headers,
+        )
         return GetClusterComplianceResponse.from_dict(res)
 
-    def list_compliance(self,
-                        policy_id: str,
-                        *,
-                        page_size: Optional[int] = None,
-                        page_token: Optional[str] = None) -> Iterator[ClusterCompliance]:
+    def list_compliance(
+        self,
+        policy_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ClusterCompliance]:
         """List cluster policy compliance.
-        
+
         Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
         compliance if their policy was updated after the cluster was last edited.
-        
+
         :param policy_id: str
           Canonical unique identifier for the cluster policy.
         :param page_size: int (optional)
@@ -10710,36 +12642,43 @@ def list_compliance(self,
         :param page_token: str (optional)
           A page token that can be used to navigate to the next page or previous page as returned by
           `next_page_token` or `prev_page_token`.
-        
+
         :returns: Iterator over :class:`ClusterCompliance`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/policies/clusters/list-compliance',
-                                query=query,
-                                headers=headers)
-            if 'clusters' in json:
-                for v in json['clusters']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/policies/clusters/list-compliance",
+                query=query,
+                headers=headers,
+            )
+            if "clusters" in json:
+                for v in json["clusters"]:
                     yield ClusterCompliance.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
 
 class PolicyFamiliesAPI:
     """View available policy families. A policy family contains a policy definition providing best practices for
     configuring clusters for a particular use case.
-    
+
     Databricks manages and provides policy families for several common cluster use cases. You cannot create,
     edit, or delete policy families.
-    
+
     Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a
     policy family. Cluster policies created using a policy family inherit the policy family's policy
     definition."""
@@ -10749,54 +12688,65 @@ def __init__(self, api_client):
 
     def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily:
         """Get policy family information.
-        
+
         Retrieve the information for a policy family based on its identifier and version.
-        
+
         :param policy_family_id: str
           The family ID about which to retrieve information.
         :param version: int (optional)
           The version number for the family to fetch. Defaults to the latest version.
-        
+
         :returns: :class:`PolicyFamily`
         """
 
         query = {}
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/policy-families/{policy_family_id}',
-                           query=query,
-                           headers=headers)
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/policy-families/{policy_family_id}",
+            query=query,
+            headers=headers,
+        )
         return PolicyFamily.from_dict(res)
 
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[PolicyFamily]:
+    def list(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PolicyFamily]:
         """List policy families.
-        
+
         Returns the list of policy definition types available to use at their latest version. This API is
         paginated.
-        
+
         :param max_results: int (optional)
           Maximum number of policy families to return.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
-        
+
         :returns: Iterator over :class:`PolicyFamily`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/policy-families', query=query, headers=headers)
-            if 'policy_families' in json:
-                for v in json['policy_families']:
+            json = self._api.do("GET", "/api/2.0/policy-families", query=query, headers=headers)
+            if "policy_families" in json:
+                for v in json["policy_families"]:
                     yield PolicyFamily.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index ba01ba41d..d3f00a535 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import sql
 
@@ -27,19 +27,21 @@ class CancelQueryExecutionResponse:
     def as_dict(self) -> dict:
         """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.status: body['status'] = [v.as_dict() for v in self.status]
+        if self.status:
+            body["status"] = [v.as_dict() for v in self.status]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.status: body['status'] = self.status
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponse:
         """Deserializes the CancelQueryExecutionResponse from a dictionary."""
-        return cls(status=_repeated_dict(d, 'status', CancelQueryExecutionResponseStatus))
+        return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus))
 
 
 @dataclass
@@ -59,25 +61,33 @@ class CancelQueryExecutionResponseStatus:
     def as_dict(self) -> dict:
         """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
-        if self.pending: body['pending'] = self.pending.as_dict()
-        if self.success: body['success'] = self.success.as_dict()
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.pending:
+            body["pending"] = self.pending.as_dict()
+        if self.success:
+            body["success"] = self.success.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
-        if self.pending: body['pending'] = self.pending
-        if self.success: body['success'] = self.success
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.pending:
+            body["pending"] = self.pending
+        if self.success:
+            body["success"] = self.success
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponseStatus:
         """Deserializes the CancelQueryExecutionResponseStatus from a dictionary."""
-        return cls(data_token=d.get('data_token', None),
-                   pending=_from_dict(d, 'pending', Empty),
-                   success=_from_dict(d, 'success', Empty))
+        return cls(
+            data_token=d.get("data_token", None),
+            pending=_from_dict(d, "pending", Empty),
+            success=_from_dict(d, "success", Empty),
+        )
 
 
 @dataclass
@@ -98,23 +108,27 @@ def as_dict(self) -> dict:
         """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
-        return cls(quartz_cron_expression=d.get('quartz_cron_expression', None),
-                   timezone_id=d.get('timezone_id', None))
+        return cls(
+            quartz_cron_expression=d.get("quartz_cron_expression", None),
+            timezone_id=d.get("timezone_id", None),
+        )
 
 
 @dataclass
@@ -161,72 +175,94 @@ class Dashboard:
     def as_dict(self) -> dict:
         """Serializes the Dashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.path is not None: body['path'] = self.path
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.path is not None:
+            body["path"] = self.path
+        if self.serialized_dashboard is not None:
+            body["serialized_dashboard"] = self.serialized_dashboard
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.path is not None: body['path'] = self.path
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.path is not None:
+            body["path"] = self.path
+        if self.serialized_dashboard is not None:
+            body["serialized_dashboard"] = self.serialized_dashboard
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   parent_path=d.get('parent_path', None),
-                   path=d.get('path', None),
-                   serialized_dashboard=d.get('serialized_dashboard', None),
-                   update_time=d.get('update_time', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            dashboard_id=d.get("dashboard_id", None),
+            display_name=d.get("display_name", None),
+            etag=d.get("etag", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            parent_path=d.get("parent_path", None),
+            path=d.get("path", None),
+            serialized_dashboard=d.get("serialized_dashboard", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 class DashboardView(Enum):
 
-    DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC'
+    DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC"
 
 
 class DataType(Enum):
 
-    DATA_TYPE_ARRAY = 'DATA_TYPE_ARRAY'
-    DATA_TYPE_BIG_INT = 'DATA_TYPE_BIG_INT'
-    DATA_TYPE_BINARY = 'DATA_TYPE_BINARY'
-    DATA_TYPE_BOOLEAN = 'DATA_TYPE_BOOLEAN'
-    DATA_TYPE_DATE = 'DATA_TYPE_DATE'
-    DATA_TYPE_DECIMAL = 'DATA_TYPE_DECIMAL'
-    DATA_TYPE_DOUBLE = 'DATA_TYPE_DOUBLE'
-    DATA_TYPE_FLOAT = 'DATA_TYPE_FLOAT'
-    DATA_TYPE_INT = 'DATA_TYPE_INT'
-    DATA_TYPE_INTERVAL = 'DATA_TYPE_INTERVAL'
-    DATA_TYPE_MAP = 'DATA_TYPE_MAP'
-    DATA_TYPE_SMALL_INT = 'DATA_TYPE_SMALL_INT'
-    DATA_TYPE_STRING = 'DATA_TYPE_STRING'
-    DATA_TYPE_STRUCT = 'DATA_TYPE_STRUCT'
-    DATA_TYPE_TIMESTAMP = 'DATA_TYPE_TIMESTAMP'
-    DATA_TYPE_TINY_INT = 'DATA_TYPE_TINY_INT'
-    DATA_TYPE_VOID = 'DATA_TYPE_VOID'
+    DATA_TYPE_ARRAY = "DATA_TYPE_ARRAY"
+    DATA_TYPE_BIG_INT = "DATA_TYPE_BIG_INT"
+    DATA_TYPE_BINARY = "DATA_TYPE_BINARY"
+    DATA_TYPE_BOOLEAN = "DATA_TYPE_BOOLEAN"
+    DATA_TYPE_DATE = "DATA_TYPE_DATE"
+    DATA_TYPE_DECIMAL = "DATA_TYPE_DECIMAL"
+    DATA_TYPE_DOUBLE = "DATA_TYPE_DOUBLE"
+    DATA_TYPE_FLOAT = "DATA_TYPE_FLOAT"
+    DATA_TYPE_INT = "DATA_TYPE_INT"
+    DATA_TYPE_INTERVAL = "DATA_TYPE_INTERVAL"
+    DATA_TYPE_MAP = "DATA_TYPE_MAP"
+    DATA_TYPE_SMALL_INT = "DATA_TYPE_SMALL_INT"
+    DATA_TYPE_STRING = "DATA_TYPE_STRING"
+    DATA_TYPE_STRUCT = "DATA_TYPE_STRUCT"
+    DATA_TYPE_TIMESTAMP = "DATA_TYPE_TIMESTAMP"
+    DATA_TYPE_TINY_INT = "DATA_TYPE_TINY_INT"
+    DATA_TYPE_VOID = "DATA_TYPE_VOID"
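A quick round-trip sketch for the serialization helpers reformatted above (field values are illustrative, and it assumes, as in the generated dataclass, that all fields default to None): `as_dict()` writes the enum's `.value` and drops None fields, and `from_dict()` restores the enum via `_enum`.

    from databricks.sdk.service.dashboards import Dashboard, LifecycleState

    d = Dashboard(dashboard_id="abc123", display_name="Sales", lifecycle_state=LifecycleState.ACTIVE)
    payload = d.as_dict()  # {'dashboard_id': 'abc123', 'display_name': 'Sales', 'lifecycle_state': 'ACTIVE'}
    restored = Dashboard.from_dict(payload)
    assert restored.lifecycle_state is LifecycleState.ACTIVE
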
 
 
 @dataclass
@@ -308,25 +344,33 @@ class ExecutePublishedDashboardQueryRequest:
     def as_dict(self) -> dict:
         """Serializes the ExecutePublishedDashboardQueryRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
-        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
-        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        if self.dashboard_name is not None:
+            body["dashboard_name"] = self.dashboard_name
+        if self.dashboard_revision_id is not None:
+            body["dashboard_revision_id"] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None:
+            body["override_warehouse_id"] = self.override_warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExecutePublishedDashboardQueryRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
-        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
-        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        if self.dashboard_name is not None:
+            body["dashboard_name"] = self.dashboard_name
+        if self.dashboard_revision_id is not None:
+            body["dashboard_revision_id"] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None:
+            body["override_warehouse_id"] = self.override_warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExecutePublishedDashboardQueryRequest:
         """Deserializes the ExecutePublishedDashboardQueryRequest from a dictionary."""
-        return cls(dashboard_name=d.get('dashboard_name', None),
-                   dashboard_revision_id=d.get('dashboard_revision_id', None),
-                   override_warehouse_id=d.get('override_warehouse_id', None))
+        return cls(
+            dashboard_name=d.get("dashboard_name", None),
+            dashboard_revision_id=d.get("dashboard_revision_id", None),
+            override_warehouse_id=d.get("override_warehouse_id", None),
+        )
 
 
 @dataclass
@@ -359,21 +403,28 @@ class GenieAttachment:
     def as_dict(self) -> dict:
         """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query: body['query'] = self.query.as_dict()
-        if self.text: body['text'] = self.text.as_dict()
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.text:
+            body["text"] = self.text.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query: body['query'] = self.query
-        if self.text: body['text'] = self.text
+        if self.query:
+            body["query"] = self.query
+        if self.text:
+            body["text"] = self.text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieAttachment:
         """Deserializes the GenieAttachment from a dictionary."""
-        return cls(query=_from_dict(d, 'query', QueryAttachment), text=_from_dict(d, 'text', TextAttachment))
+        return cls(
+            query=_from_dict(d, "query", QueryAttachment),
+            text=_from_dict(d, "text", TextAttachment),
+        )
 
 
 @dataclass
@@ -399,36 +450,48 @@ class GenieConversation:
     def as_dict(self) -> dict:
         """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
-        if self.id is not None: body['id'] = self.id
+        if self.created_timestamp is not None:
+            body["created_timestamp"] = self.created_timestamp
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.space_id is not None: body['space_id'] = self.space_id
-        if self.title is not None: body['title'] = self.title
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieConversation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
-        if self.id is not None: body['id'] = self.id
+        if self.created_timestamp is not None:
+            body["created_timestamp"] = self.created_timestamp
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.space_id is not None: body['space_id'] = self.space_id
-        if self.title is not None: body['title'] = self.title
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieConversation:
         """Deserializes the GenieConversation from a dictionary."""
-        return cls(created_timestamp=d.get('created_timestamp', None),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   space_id=d.get('space_id', None),
-                   title=d.get('title', None),
-                   user_id=d.get('user_id', None))
+        return cls(
+            created_timestamp=d.get("created_timestamp", None),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            space_id=d.get("space_id", None),
+            title=d.get("title", None),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -445,25 +508,33 @@ class GenieCreateConversationMessageRequest:
     def as_dict(self) -> dict:
         """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest:
         """Deserializes the GenieCreateConversationMessageRequest from a dictionary."""
-        return cls(content=d.get('content', None),
-                   conversation_id=d.get('conversation_id', None),
-                   space_id=d.get('space_id', None))
+        return cls(
+            content=d.get("content", None),
+            conversation_id=d.get("conversation_id", None),
+            space_id=d.get("space_id", None),
+        )
 
 
 @dataclass
@@ -475,19 +546,21 @@ class GenieGetMessageQueryResultResponse:
     def as_dict(self) -> dict:
         """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.statement_response: body['statement_response'] = self.statement_response.as_dict()
+        if self.statement_response:
+            body["statement_response"] = self.statement_response.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.statement_response: body['statement_response'] = self.statement_response
+        if self.statement_response:
+            body["statement_response"] = self.statement_response
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse:
         """Deserializes the GenieGetMessageQueryResultResponse from a dictionary."""
-        return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse))
+        return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse))
 
 
 @dataclass
@@ -540,51 +613,73 @@ class GenieMessage:
     def as_dict(self) -> dict:
         """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments]
-        if self.content is not None: body['content'] = self.content
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
-        if self.error: body['error'] = self.error.as_dict()
-        if self.id is not None: body['id'] = self.id
+        if self.attachments:
+            body["attachments"] = [v.as_dict() for v in self.attachments]
+        if self.content is not None:
+            body["content"] = self.content
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.created_timestamp is not None:
+            body["created_timestamp"] = self.created_timestamp
+        if self.error:
+            body["error"] = self.error.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.query_result: body['query_result'] = self.query_result.as_dict()
-        if self.space_id is not None: body['space_id'] = self.space_id
-        if self.status is not None: body['status'] = self.status.value
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.query_result:
+            body["query_result"] = self.query_result.as_dict()
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.attachments: body['attachments'] = self.attachments
-        if self.content is not None: body['content'] = self.content
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
-        if self.error: body['error'] = self.error
-        if self.id is not None: body['id'] = self.id
+        if self.attachments:
+            body["attachments"] = self.attachments
+        if self.content is not None:
+            body["content"] = self.content
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.created_timestamp is not None:
+            body["created_timestamp"] = self.created_timestamp
+        if self.error:
+            body["error"] = self.error
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.query_result: body['query_result'] = self.query_result
-        if self.space_id is not None: body['space_id'] = self.space_id
-        if self.status is not None: body['status'] = self.status
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.query_result:
+            body["query_result"] = self.query_result
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
+        if self.status is not None:
+            body["status"] = self.status
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieMessage:
         """Deserializes the GenieMessage from a dictionary."""
-        return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment),
-                   content=d.get('content', None),
-                   conversation_id=d.get('conversation_id', None),
-                   created_timestamp=d.get('created_timestamp', None),
-                   error=_from_dict(d, 'error', MessageError),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   query_result=_from_dict(d, 'query_result', Result),
-                   space_id=d.get('space_id', None),
-                   status=_enum(d, 'status', MessageStatus),
-                   user_id=d.get('user_id', None))
+        return cls(
+            attachments=_repeated_dict(d, "attachments", GenieAttachment),
+            content=d.get("content", None),
+            conversation_id=d.get("conversation_id", None),
+            created_timestamp=d.get("created_timestamp", None),
+            error=_from_dict(d, "error", MessageError),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            query_result=_from_dict(d, "query_result", Result),
+            space_id=d.get("space_id", None),
+            status=_enum(d, "status", MessageStatus),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -598,21 +693,25 @@ class GenieStartConversationMessageRequest:
     def as_dict(self) -> dict:
         """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.space_id is not None:
+            body["space_id"] = self.space_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest:
         """Deserializes the GenieStartConversationMessageRequest from a dictionary."""
-        return cls(content=d.get('content', None), space_id=d.get('space_id', None))
+        return cls(content=d.get("content", None), space_id=d.get("space_id", None))
 
 
 @dataclass
@@ -630,28 +729,38 @@ class GenieStartConversationResponse:
     def as_dict(self) -> dict:
         """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.conversation: body['conversation'] = self.conversation.as_dict()
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.message: body['message'] = self.message.as_dict()
-        if self.message_id is not None: body['message_id'] = self.message_id
+        if self.conversation:
+            body["conversation"] = self.conversation.as_dict()
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.message:
+            body["message"] = self.message.as_dict()
+        if self.message_id is not None:
+            body["message_id"] = self.message_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.conversation: body['conversation'] = self.conversation
-        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
-        if self.message: body['message'] = self.message
-        if self.message_id is not None: body['message_id'] = self.message_id
+        if self.conversation:
+            body["conversation"] = self.conversation
+        if self.conversation_id is not None:
+            body["conversation_id"] = self.conversation_id
+        if self.message:
+            body["message"] = self.message
+        if self.message_id is not None:
+            body["message_id"] = self.message_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
         """Deserializes the GenieStartConversationResponse from a dictionary."""
-        return cls(conversation=_from_dict(d, 'conversation', GenieConversation),
-                   conversation_id=d.get('conversation_id', None),
-                   message=_from_dict(d, 'message', GenieMessage),
-                   message_id=d.get('message_id', None))
+        return cls(
+            conversation=_from_dict(d, "conversation", GenieConversation),
+            conversation_id=d.get("conversation_id", None),
+            message=_from_dict(d, "message", GenieMessage),
+            message_id=d.get("message_id", None),
+        )
 
 
 @dataclass
@@ -675,8 +784,8 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedDashboardEmbeddedResponse:
 
 class LifecycleState(Enum):
 
-    ACTIVE = 'ACTIVE'
-    TRASHED = 'TRASHED'
+    ACTIVE = "ACTIVE"
+    TRASHED = "TRASHED"
 
 
 @dataclass
@@ -690,22 +799,28 @@ class ListDashboardsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListDashboardsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboards: body['dashboards'] = [v.as_dict() for v in self.dashboards]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.dashboards:
+            body["dashboards"] = [v.as_dict() for v in self.dashboards]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboards: body['dashboards'] = self.dashboards
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.dashboards:
+            body["dashboards"] = self.dashboards
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDashboardsResponse:
         """Deserializes the ListDashboardsResponse from a dictionary."""
-        return cls(dashboards=_repeated_dict(d, 'dashboards', Dashboard),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            dashboards=_repeated_dict(d, "dashboards", Dashboard),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -719,22 +834,28 @@ class ListSchedulesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSchedulesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schedules:
+            body["schedules"] = [v.as_dict() for v in self.schedules]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schedules: body['schedules'] = self.schedules
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schedules:
+            body["schedules"] = self.schedules
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchedulesResponse:
         """Deserializes the ListSchedulesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   schedules=_repeated_dict(d, 'schedules', Schedule))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            schedules=_repeated_dict(d, "schedules", Schedule),
+        )
 
 
 @dataclass
@@ -748,22 +869,28 @@ class ListSubscriptionsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSubscriptionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.subscriptions:
+            body["subscriptions"] = [v.as_dict() for v in self.subscriptions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.subscriptions:
+            body["subscriptions"] = self.subscriptions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse:
         """Deserializes the ListSubscriptionsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   subscriptions=_repeated_dict(d, 'subscriptions', Subscription))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            subscriptions=_repeated_dict(d, "subscriptions", Subscription),
+        )
 
 
 @dataclass
@@ -775,66 +902,70 @@ class MessageError:
     def as_dict(self) -> dict:
         """Serializes the MessageError into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error is not None: body['error'] = self.error
-        if self.type is not None: body['type'] = self.type.value
+        if self.error is not None:
+            body["error"] = self.error
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MessageError into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error is not None: body['error'] = self.error
-        if self.type is not None: body['type'] = self.type
+        if self.error is not None:
+            body["error"] = self.error
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MessageError:
         """Deserializes the MessageError from a dictionary."""
-        return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType))
+        return cls(error=d.get("error", None), type=_enum(d, "type", MessageErrorType))
 
 
 class MessageErrorType(Enum):
 
-    BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION'
-    CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION'
-    CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION'
-    CHAT_COMPLETION_NETWORK_EXCEPTION = 'CHAT_COMPLETION_NETWORK_EXCEPTION'
-    CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION'
-    CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION'
-    COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION'
-    DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION'
-    FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION'
-    FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION'
-    FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION'
-    FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION'
-    GENERIC_CHAT_COMPLETION_EXCEPTION = 'GENERIC_CHAT_COMPLETION_EXCEPTION'
-    GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION'
-    GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION'
-    ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION'
-    INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION'
-    INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION'
-    INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION'
-    INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION'
-    INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION'
-    INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION'
-    LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
-    MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
-    MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
-    NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE'
-    NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
-    NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
-    RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
-    RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION'
-    REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
-    RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
-    SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
-    STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE'
-    TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
-    TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
-    TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
-    UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION'
-    UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL'
-    WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION'
-    WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION'
+    BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION"
+    CHAT_COMPLETION_CLIENT_EXCEPTION = "CHAT_COMPLETION_CLIENT_EXCEPTION"
+    CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION"
+    CHAT_COMPLETION_NETWORK_EXCEPTION = "CHAT_COMPLETION_NETWORK_EXCEPTION"
+    CONTENT_FILTER_EXCEPTION = "CONTENT_FILTER_EXCEPTION"
+    CONTEXT_EXCEEDED_EXCEPTION = "CONTEXT_EXCEEDED_EXCEPTION"
+    COULD_NOT_GET_UC_SCHEMA_EXCEPTION = "COULD_NOT_GET_UC_SCHEMA_EXCEPTION"
+    DEPLOYMENT_NOT_FOUND_EXCEPTION = "DEPLOYMENT_NOT_FOUND_EXCEPTION"
+    FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION"
+    FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION"
+    FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION"
+    FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION"
+    GENERIC_CHAT_COMPLETION_EXCEPTION = "GENERIC_CHAT_COMPLETION_EXCEPTION"
+    GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION"
+    GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION"
+    ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION"
+    INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION"
+    INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION"
+    INVALID_CHAT_COMPLETION_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_JSON_EXCEPTION"
+    INVALID_COMPLETION_REQUEST_EXCEPTION = "INVALID_COMPLETION_REQUEST_EXCEPTION"
+    INVALID_FUNCTION_CALL_EXCEPTION = "INVALID_FUNCTION_CALL_EXCEPTION"
+    INVALID_TABLE_IDENTIFIER_EXCEPTION = "INVALID_TABLE_IDENTIFIER_EXCEPTION"
+    LOCAL_CONTEXT_EXCEEDED_EXCEPTION = "LOCAL_CONTEXT_EXCEEDED_EXCEPTION"
+    MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION"
+    MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
+    NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE"
+    NO_QUERY_TO_VISUALIZE_EXCEPTION = "NO_QUERY_TO_VISUALIZE_EXCEPTION"
+    NO_TABLES_TO_QUERY_EXCEPTION = "NO_TABLES_TO_QUERY_EXCEPTION"
+    RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION"
+    RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION"
+    REPLY_PROCESS_TIMEOUT_EXCEPTION = "REPLY_PROCESS_TIMEOUT_EXCEPTION"
+    RETRYABLE_PROCESSING_EXCEPTION = "RETRYABLE_PROCESSING_EXCEPTION"
+    SQL_EXECUTION_EXCEPTION = "SQL_EXECUTION_EXCEPTION"
+    STOP_PROCESS_DUE_TO_AUTO_REGENERATE = "STOP_PROCESS_DUE_TO_AUTO_REGENERATE"
+    TABLES_MISSING_EXCEPTION = "TABLES_MISSING_EXCEPTION"
+    TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION"
+    TOO_MANY_TABLES_EXCEPTION = "TOO_MANY_TABLES_EXCEPTION"
+    UNEXPECTED_REPLY_PROCESS_EXCEPTION = "UNEXPECTED_REPLY_PROCESS_EXCEPTION"
+    UNKNOWN_AI_MODEL = "UNKNOWN_AI_MODEL"
+    WAREHOUSE_ACCESS_MISSING_EXCEPTION = "WAREHOUSE_ACCESS_MISSING_EXCEPTION"
+    WAREHOUSE_NOT_FOUND_EXCEPTION = "WAREHOUSE_NOT_FOUND_EXCEPTION"
 
 
 class MessageStatus(Enum):
@@ -850,18 +981,19 @@ class MessageStatus(Enum):
     processing is completed. Results are in the `attachments` field. Get the SQL query result by
     calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
     has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user
-    needs to execute the query again. * `CANCELLED`: Message has been cancelled."""
+    needs to execute the query again. * `CANCELLED`: Message has been cancelled.
+    """
 
-    ASKING_AI = 'ASKING_AI'
-    CANCELLED = 'CANCELLED'
-    COMPLETED = 'COMPLETED'
-    EXECUTING_QUERY = 'EXECUTING_QUERY'
-    FAILED = 'FAILED'
-    FETCHING_METADATA = 'FETCHING_METADATA'
-    FILTERING_CONTEXT = 'FILTERING_CONTEXT'
-    PENDING_WAREHOUSE = 'PENDING_WAREHOUSE'
-    QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
-    SUBMITTED = 'SUBMITTED'
+    ASKING_AI = "ASKING_AI"
+    CANCELLED = "CANCELLED"
+    COMPLETED = "COMPLETED"
+    EXECUTING_QUERY = "EXECUTING_QUERY"
+    FAILED = "FAILED"
+    FETCHING_METADATA = "FETCHING_METADATA"
+    FILTERING_CONTEXT = "FILTERING_CONTEXT"
+    PENDING_WAREHOUSE = "PENDING_WAREHOUSE"
+    QUERY_RESULT_EXPIRED = "QUERY_RESULT_EXPIRED"
+    SUBMITTED = "SUBMITTED"
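As a hedged illustration of the `MessageStatus` lifecycle documented above: the `fetch` callable below is an assumption standing in for whatever Genie API call re-reads a `GenieMessage`; only the imported types come from this module.

    import time
    from typing import Callable

    from databricks.sdk.service.dashboards import GenieMessage, MessageStatus

    # Statuses after which no further progress is expected.
    _TERMINAL = {
        MessageStatus.COMPLETED,
        MessageStatus.FAILED,
        MessageStatus.CANCELLED,
        MessageStatus.QUERY_RESULT_EXPIRED,
    }

    def wait_for_message(fetch: Callable[[], GenieMessage], timeout: float = 600.0) -> GenieMessage:
        """Poll `fetch` until the message reaches a terminal status or the timeout expires."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            msg = fetch()
            if msg.status in _TERMINAL:
                return msg
            time.sleep(5)
        raise TimeoutError("Genie message did not reach a terminal status in time")
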
 
 
 @dataclass
@@ -882,30 +1014,38 @@ class MigrateDashboardRequest:
     def as_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.source_dashboard_id is not None:
+            body["source_dashboard_id"] = self.source_dashboard_id
         if self.update_parameter_syntax is not None:
-            body['update_parameter_syntax'] = self.update_parameter_syntax
+            body["update_parameter_syntax"] = self.update_parameter_syntax
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.source_dashboard_id is not None:
+            body["source_dashboard_id"] = self.source_dashboard_id
         if self.update_parameter_syntax is not None:
-            body['update_parameter_syntax'] = self.update_parameter_syntax
+            body["update_parameter_syntax"] = self.update_parameter_syntax
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
         """Deserializes the MigrateDashboardRequest from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   parent_path=d.get('parent_path', None),
-                   source_dashboard_id=d.get('source_dashboard_id', None),
-                   update_parameter_syntax=d.get('update_parameter_syntax', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            parent_path=d.get("parent_path", None),
+            source_dashboard_id=d.get("source_dashboard_id", None),
+            update_parameter_syntax=d.get("update_parameter_syntax", None),
+        )
 
 
 @dataclass
@@ -917,19 +1057,21 @@ class PendingStatus:
     def as_dict(self) -> dict:
         """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PendingStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PendingStatus:
         """Deserializes the PendingStatus from a dictionary."""
-        return cls(data_token=d.get('data_token', None))
+        return cls(data_token=d.get("data_token", None))
 
 
 @dataclass
@@ -939,19 +1081,21 @@ class PollQueryStatusResponse:
     def as_dict(self) -> dict:
         """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        if self.data:
+            body["data"] = [v.as_dict() for v in self.data]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data: body['data'] = self.data
+        if self.data:
+            body["data"] = self.data
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponse:
         """Deserializes the PollQueryStatusResponse from a dictionary."""
-        return cls(data=_repeated_dict(d, 'data', PollQueryStatusResponseData))
+        return cls(data=_repeated_dict(d, "data", PollQueryStatusResponseData))
 
 
 @dataclass
@@ -961,19 +1105,21 @@ class PollQueryStatusResponseData:
     def as_dict(self) -> dict:
         """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.status: body['status'] = self.status.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.status: body['status'] = self.status
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponseData:
         """Deserializes the PollQueryStatusResponseData from a dictionary."""
-        return cls(status=_from_dict(d, 'status', QueryResponseStatus))
+        return cls(status=_from_dict(d, "status", QueryResponseStatus))
 
 
 @dataclass
@@ -991,25 +1137,33 @@ class PublishRequest:
     def as_dict(self) -> dict:
         """Serializes the PublishRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.embed_credentials is not None:
+            body["embed_credentials"] = self.embed_credentials
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PublishRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.embed_credentials is not None:
+            body["embed_credentials"] = self.embed_credentials
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishRequest:
         """Deserializes the PublishRequest from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   embed_credentials=d.get('embed_credentials', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            embed_credentials=d.get("embed_credentials", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -1029,28 +1183,38 @@ class PublishedDashboard:
     def as_dict(self) -> dict:
         """Serializes the PublishedDashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
-        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.embed_credentials is not None:
+            body["embed_credentials"] = self.embed_credentials
+        if self.revision_create_time is not None:
+            body["revision_create_time"] = self.revision_create_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
-        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.embed_credentials is not None:
+            body["embed_credentials"] = self.embed_credentials
+        if self.revision_create_time is not None:
+            body["revision_create_time"] = self.revision_create_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
         """Deserializes the PublishedDashboard from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   embed_credentials=d.get('embed_credentials', None),
-                   revision_create_time=d.get('revision_create_time', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            embed_credentials=d.get("embed_credentials", None),
+            revision_create_time=d.get("revision_create_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -1083,45 +1247,63 @@ class QueryAttachment:
     def as_dict(self) -> dict:
         """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema.as_dict()
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
-        if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
+        if self.cached_query_schema:
+            body["cached_query_schema"] = self.cached_query_schema.as_dict()
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instruction_id is not None:
+            body["instruction_id"] = self.instruction_id
+        if self.instruction_title is not None:
+            body["instruction_title"] = self.instruction_title
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.query is not None: body['query'] = self.query
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.title is not None: body['title'] = self.title
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.query is not None:
+            body["query"] = self.query
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.title is not None:
+            body["title"] = self.title
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryAttachment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
-        if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
+        if self.cached_query_schema:
+            body["cached_query_schema"] = self.cached_query_schema
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instruction_id is not None:
+            body["instruction_id"] = self.instruction_id
+        if self.instruction_title is not None:
+            body["instruction_title"] = self.instruction_title
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.query is not None: body['query'] = self.query
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.title is not None: body['title'] = self.title
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.query is not None:
+            body["query"] = self.query
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.title is not None:
+            body["title"] = self.title
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
         """Deserializes the QueryAttachment from a dictionary."""
-        return cls(cached_query_schema=_from_dict(d, 'cached_query_schema', QuerySchema),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   instruction_id=d.get('instruction_id', None),
-                   instruction_title=d.get('instruction_title', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   query=d.get('query', None),
-                   statement_id=d.get('statement_id', None),
-                   title=d.get('title', None))
+        return cls(
+            cached_query_schema=_from_dict(d, "cached_query_schema", QuerySchema),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            instruction_id=d.get("instruction_id", None),
+            instruction_title=d.get("instruction_title", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            query=d.get("query", None),
+            statement_id=d.get("statement_id", None),
+            title=d.get("title", None),
+        )
 
 
 @dataclass
@@ -1146,31 +1328,43 @@ class QueryResponseStatus:
     def as_dict(self) -> dict:
         """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.canceled: body['canceled'] = self.canceled.as_dict()
-        if self.closed: body['closed'] = self.closed.as_dict()
-        if self.pending: body['pending'] = self.pending.as_dict()
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.success: body['success'] = self.success.as_dict()
+        if self.canceled:
+            body["canceled"] = self.canceled.as_dict()
+        if self.closed:
+            body["closed"] = self.closed.as_dict()
+        if self.pending:
+            body["pending"] = self.pending.as_dict()
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.success:
+            body["success"] = self.success.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.canceled: body['canceled'] = self.canceled
-        if self.closed: body['closed'] = self.closed
-        if self.pending: body['pending'] = self.pending
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.success: body['success'] = self.success
+        if self.canceled:
+            body["canceled"] = self.canceled
+        if self.closed:
+            body["closed"] = self.closed
+        if self.pending:
+            body["pending"] = self.pending
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.success:
+            body["success"] = self.success
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryResponseStatus:
         """Deserializes the QueryResponseStatus from a dictionary."""
-        return cls(canceled=_from_dict(d, 'canceled', Empty),
-                   closed=_from_dict(d, 'closed', Empty),
-                   pending=_from_dict(d, 'pending', PendingStatus),
-                   statement_id=d.get('statement_id', None),
-                   success=_from_dict(d, 'success', SuccessStatus))
+        return cls(
+            canceled=_from_dict(d, "canceled", Empty),
+            closed=_from_dict(d, "closed", Empty),
+            pending=_from_dict(d, "pending", PendingStatus),
+            statement_id=d.get("statement_id", None),
+            success=_from_dict(d, "success", SuccessStatus),
+        )
 
 
 @dataclass
@@ -1184,22 +1378,28 @@ class QuerySchema:
     def as_dict(self) -> dict:
         """Serializes the QuerySchema into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QuerySchema into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.columns:
+            body["columns"] = self.columns
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QuerySchema:
         """Deserializes the QuerySchema from a dictionary."""
-        return cls(columns=_repeated_dict(d, 'columns', QuerySchemaColumn),
-                   statement_id=d.get('statement_id', None))
+        return cls(
+            columns=_repeated_dict(d, "columns", QuerySchemaColumn),
+            statement_id=d.get("statement_id", None),
+        )
 
 
 @dataclass
@@ -1215,25 +1415,33 @@ class QuerySchemaColumn:
     def as_dict(self) -> dict:
         """Serializes the QuerySchemaColumn into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_type is not None: body['data_type'] = self.data_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.data_type is not None:
+            body["data_type"] = self.data_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QuerySchemaColumn into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_type is not None: body['data_type'] = self.data_type
-        if self.name is not None: body['name'] = self.name
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.data_type is not None:
+            body["data_type"] = self.data_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QuerySchemaColumn:
         """Deserializes the QuerySchemaColumn from a dictionary."""
-        return cls(data_type=_enum(d, 'data_type', DataType),
-                   name=d.get('name', None),
-                   type_text=d.get('type_text', None))
+        return cls(
+            data_type=_enum(d, "data_type", DataType),
+            name=d.get("name", None),
+            type_text=d.get("type_text", None),
+        )
 
 
 @dataclass
@@ -1251,25 +1459,33 @@ class Result:
     def as_dict(self) -> dict:
         """Serializes the Result into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.is_truncated is not None:
+            body["is_truncated"] = self.is_truncated
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Result into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.is_truncated is not None:
+            body["is_truncated"] = self.is_truncated
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Result:
         """Deserializes the Result from a dictionary."""
-        return cls(is_truncated=d.get('is_truncated', None),
-                   row_count=d.get('row_count', None),
-                   statement_id=d.get('statement_id', None))
+        return cls(
+            is_truncated=d.get("is_truncated", None),
+            row_count=d.get("row_count", None),
+            statement_id=d.get("statement_id", None),
+        )
 
 
 @dataclass
@@ -1306,49 +1522,69 @@ class Schedule:
     def as_dict(self) -> dict:
         """Serializes the Schedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.cron_schedule:
+            body["cron_schedule"] = self.cron_schedule.as_dict()
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
+        if self.schedule_id is not None:
+            body["schedule_id"] = self.schedule_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Schedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.cron_schedule:
+            body["cron_schedule"] = self.cron_schedule
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
+        if self.schedule_id is not None:
+            body["schedule_id"] = self.schedule_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Schedule:
         """Deserializes the Schedule from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   pause_status=_enum(d, 'pause_status', SchedulePauseStatus),
-                   schedule_id=d.get('schedule_id', None),
-                   update_time=d.get('update_time', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            cron_schedule=_from_dict(d, "cron_schedule", CronSchedule),
+            dashboard_id=d.get("dashboard_id", None),
+            display_name=d.get("display_name", None),
+            etag=d.get("etag", None),
+            pause_status=_enum(d, "pause_status", SchedulePauseStatus),
+            schedule_id=d.get("schedule_id", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 class SchedulePauseStatus(Enum):
 
-    PAUSED = 'PAUSED'
-    UNPAUSED = 'UNPAUSED'
+    PAUSED = "PAUSED"
+    UNPAUSED = "UNPAUSED"
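# Editor's note: illustrative sketch (not part of the patch) of how enum fields round-trip:
# as_dict() writes the enum's .value, while from_dict() maps the raw string back through
# _enum(). This assumes _from_dict()/_enum() return None for absent keys, as the symmetric
# serializers above suggest.
s = Schedule.from_dict({"pause_status": "PAUSED"})
assert s.pause_status is SchedulePauseStatus.PAUSED
assert s.as_dict() == {"pause_status": "PAUSED"}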
 
 
 @dataclass
@@ -1364,23 +1600,28 @@ class Subscriber:
     def as_dict(self) -> dict:
         """Serializes the Subscriber into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber.as_dict()
-        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict()
+        if self.destination_subscriber:
+            body["destination_subscriber"] = self.destination_subscriber.as_dict()
+        if self.user_subscriber:
+            body["user_subscriber"] = self.user_subscriber.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Subscriber into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber
-        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber
+        if self.destination_subscriber:
+            body["destination_subscriber"] = self.destination_subscriber
+        if self.user_subscriber:
+            body["user_subscriber"] = self.user_subscriber
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscriber:
         """Deserializes the Subscriber from a dictionary."""
-        return cls(destination_subscriber=_from_dict(d, 'destination_subscriber',
-                                                     SubscriptionSubscriberDestination),
-                   user_subscriber=_from_dict(d, 'user_subscriber', SubscriptionSubscriberUser))
+        return cls(
+            destination_subscriber=_from_dict(d, "destination_subscriber", SubscriptionSubscriberDestination),
+            user_subscriber=_from_dict(d, "user_subscriber", SubscriptionSubscriberUser),
+        )
 
 
 @dataclass
@@ -1414,40 +1655,58 @@ class Subscription:
     def as_dict(self) -> dict:
         """Serializes the Subscription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.etag is not None: body['etag'] = self.etag
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.subscriber: body['subscriber'] = self.subscriber.as_dict()
-        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by_user_id is not None:
+            body["created_by_user_id"] = self.created_by_user_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.schedule_id is not None:
+            body["schedule_id"] = self.schedule_id
+        if self.subscriber:
+            body["subscriber"] = self.subscriber.as_dict()
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Subscription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.etag is not None: body['etag'] = self.etag
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.subscriber: body['subscriber'] = self.subscriber
-        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by_user_id is not None:
+            body["created_by_user_id"] = self.created_by_user_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.schedule_id is not None:
+            body["schedule_id"] = self.schedule_id
+        if self.subscriber:
+            body["subscriber"] = self.subscriber
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscription:
         """Deserializes the Subscription from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   created_by_user_id=d.get('created_by_user_id', None),
-                   dashboard_id=d.get('dashboard_id', None),
-                   etag=d.get('etag', None),
-                   schedule_id=d.get('schedule_id', None),
-                   subscriber=_from_dict(d, 'subscriber', Subscriber),
-                   subscription_id=d.get('subscription_id', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            created_by_user_id=d.get("created_by_user_id", None),
+            dashboard_id=d.get("dashboard_id", None),
+            etag=d.get("etag", None),
+            schedule_id=d.get("schedule_id", None),
+            subscriber=_from_dict(d, "subscriber", Subscriber),
+            subscription_id=d.get("subscription_id", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -1458,19 +1717,21 @@ class SubscriptionSubscriberDestination:
     def as_dict(self) -> dict:
         """Serializes the SubscriptionSubscriberDestination into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        if self.destination_id is not None:
+            body["destination_id"] = self.destination_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        if self.destination_id is not None:
+            body["destination_id"] = self.destination_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberDestination:
         """Deserializes the SubscriptionSubscriberDestination from a dictionary."""
-        return cls(destination_id=d.get('destination_id', None))
+        return cls(destination_id=d.get("destination_id", None))
 
 
 @dataclass
@@ -1481,19 +1742,21 @@ class SubscriptionSubscriberUser:
     def as_dict(self) -> dict:
         """Serializes the SubscriptionSubscriberUser into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser:
         """Deserializes the SubscriptionSubscriberUser from a dictionary."""
-        return cls(user_id=d.get('user_id', None))
+        return cls(user_id=d.get("user_id", None))
 
 
 @dataclass
@@ -1508,21 +1771,28 @@ class SuccessStatus:
     def as_dict(self) -> dict:
         """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_token is not None: body['data_token'] = self.data_token
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.data_token is not None:
+            body["data_token"] = self.data_token
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SuccessStatus:
         """Deserializes the SuccessStatus from a dictionary."""
-        return cls(data_token=d.get('data_token', None), truncated=d.get('truncated', None))
+        return cls(
+            data_token=d.get("data_token", None),
+            truncated=d.get("truncated", None),
+        )
 
 
 @dataclass
@@ -1535,21 +1805,25 @@ class TextAttachment:
     def as_dict(self) -> dict:
         """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.id is not None: body['id'] = self.id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TextAttachment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.id is not None: body['id'] = self.id
+        if self.content is not None:
+            body["content"] = self.content
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextAttachment:
         """Deserializes the TextAttachment from a dictionary."""
-        return cls(content=d.get('content', None), id=d.get('id', None))
+        return cls(content=d.get("content", None), id=d.get("id", None))
 
 
 @dataclass
@@ -1600,157 +1874,188 @@ def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_message_genie_completed(
-            self,
-            conversation_id: str,
-            message_id: str,
-            space_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
+        self,
+        conversation_id: str,
+        message_id: str,
+        space_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[GenieMessage], None]] = None,
+    ) -> GenieMessage:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (MessageStatus.COMPLETED, )
-        failure_states = (MessageStatus.FAILED, )
-        status_message = 'polling...'
+        target_states = (MessageStatus.COMPLETED,)
+        failure_states = (MessageStatus.FAILED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
-            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
+            poll = self.get_message(
+                conversation_id=conversation_id,
+                message_id=message_id,
+                space_id=space_id,
+            )
             status = poll.status
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
+                msg = f"failed to reach COMPLETED, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
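# Editor's note (not part of the patch): the waiter above backs off linearly, sleeping
# min(attempt, 10) seconds plus up to 1s of random jitter between polls, until either a
# terminal state is reached or the deadline expires. Illustrative schedule:
sleeps = [min(attempt, 10) for attempt in range(1, 13)]
# -> [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10]  (each plus random.random() seconds of jitter)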
 
     def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
         """Create conversation message.
-        
+
         Create new message in [conversation](:method:genie/startconversation). The AI response uses all
         previously created messages in the conversation to respond.
-        
+
         :param space_id: str
           The ID associated with the Genie space where the conversation is started.
         :param conversation_id: str
           The ID associated with the conversation.
         :param content: str
           User message content.
-        
+
         :returns:
           Long-running operation waiter for :class:`GenieMessage`.
           See :method:wait_get_message_genie_completed for more details.
         """
         body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if content is not None:
+            body["content"] = content
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         op_response = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
+            "POST",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages",
             body=body,
-            headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieMessage.from_dict(op_response),
-                    conversation_id=conversation_id,
-                    message_id=op_response['id'],
-                    space_id=space_id)
-
-    def create_message_and_wait(self,
-                                space_id: str,
-                                conversation_id: str,
-                                content: str,
-                                timeout=timedelta(minutes=20)) -> GenieMessage:
-        return self.create_message(content=content, conversation_id=conversation_id,
-                                   space_id=space_id).result(timeout=timeout)
-
-    def execute_message_query(self, space_id: str, conversation_id: str,
-                              message_id: str) -> GenieGetMessageQueryResultResponse:
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_message_genie_completed,
+            response=GenieMessage.from_dict(op_response),
+            conversation_id=conversation_id,
+            message_id=op_response["id"],
+            space_id=space_id,
+        )
+
+    def create_message_and_wait(
+        self,
+        space_id: str,
+        conversation_id: str,
+        content: str,
+        timeout=timedelta(minutes=20),
+    ) -> GenieMessage:
+        return self.create_message(content=content, conversation_id=conversation_id, space_id=space_id).result(
+            timeout=timeout
+        )
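# Editor's note: illustrative usage sketch (not part of the patch). create_message() returns
# a Wait[GenieMessage] that can be resolved later; create_message_and_wait() above is the
# blocking convenience wrapper. The w.genie accessor name is an assumption, not confirmed
# by this diff.
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
waiter = w.genie.create_message(space_id="...", conversation_id="...", content="How many rows?")
message = waiter.result(timeout=timedelta(minutes=5))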
+
+    def execute_message_query(
+        self, space_id: str, conversation_id: str, message_id: str
+    ) -> GenieGetMessageQueryResultResponse:
         """Execute SQL query in a conversation message.
-        
+
         Execute the SQL query in the message.
-        
+
         :param space_id: str
           Genie space ID
         :param conversation_id: str
           Conversation ID
         :param message_id: str
           Message ID
-        
+
         :returns: :class:`GenieGetMessageQueryResultResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
-            headers=headers)
+            "POST",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query",
+            headers=headers,
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
     def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
         """Get conversation message.
-        
+
         Get message from conversation.
-        
+
         :param space_id: str
           The ID associated with the Genie space where the target conversation is located.
         :param conversation_id: str
           The ID associated with the target conversation.
         :param message_id: str
           The ID associated with the target message from the identified conversation.
-        
+
         :returns: :class:`GenieMessage`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}",
+            headers=headers,
+        )
         return GenieMessage.from_dict(res)
 
-    def get_message_query_result(self, space_id: str, conversation_id: str,
-                                 message_id: str) -> GenieGetMessageQueryResultResponse:
+    def get_message_query_result(
+        self, space_id: str, conversation_id: str, message_id: str
+    ) -> GenieGetMessageQueryResultResponse:
         """Get conversation message SQL query result.
-        
+
         Get the result of the SQL query if the message has a query attachment. This is only available if a message
         has a query attachment and the message status is `EXECUTING_QUERY`.
-        
+
         :param space_id: str
           Genie space ID
         :param conversation_id: str
           Conversation ID
         :param message_id: str
           Message ID
-        
+
         :returns: :class:`GenieGetMessageQueryResultResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
-            headers=headers)
+            "GET",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result",
+            headers=headers,
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
-    def get_message_query_result_by_attachment(self, space_id: str, conversation_id: str, message_id: str,
-                                               attachment_id: str) -> GenieGetMessageQueryResultResponse:
+    def get_message_query_result_by_attachment(
+        self,
+        space_id: str,
+        conversation_id: str,
+        message_id: str,
+        attachment_id: str,
+    ) -> GenieGetMessageQueryResultResponse:
         """Get conversation message SQL query result by attachment id.
-        
+
         Get the result of the SQL query by attachment id. This is only available if a message has a query
         attachment and the message status is `EXECUTING_QUERY`.
-        
+
         :param space_id: str
           Genie space ID
         :param conversation_id: str
@@ -1759,48 +2064,58 @@ def get_message_query_result_by_attachment(self, space_id: str, conversation_id:
           Message ID
         :param attachment_id: str
           Attachment ID
-        
+
         :returns: :class:`GenieGetMessageQueryResultResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}",
+            headers=headers,
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
     def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
         """Start conversation.
-        
+
         Start a new conversation.
-        
+
         :param space_id: str
           The ID associated with the Genie space where you want to start a conversation.
         :param content: str
           The text of the message that starts the conversation.
-        
+
         :returns:
           Long-running operation waiter for :class:`GenieMessage`.
           See :method:wait_get_message_genie_completed for more details.
         """
         body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieStartConversationResponse.from_dict(op_response),
-                    conversation_id=op_response['conversation_id'],
-                    message_id=op_response['message_id'],
-                    space_id=space_id)
-
-    def start_conversation_and_wait(self, space_id: str, content: str,
-                                    timeout=timedelta(minutes=20)) -> GenieMessage:
+        if content is not None:
+            body["content"] = content
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "POST",
+            f"/api/2.0/genie/spaces/{space_id}/start-conversation",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_message_genie_completed,
+            response=GenieStartConversationResponse.from_dict(op_response),
+            conversation_id=op_response["conversation_id"],
+            message_id=op_response["message_id"],
+            space_id=space_id,
+        )
+
+    def start_conversation_and_wait(self, space_id: str, content: str, timeout=timedelta(minutes=20)) -> GenieMessage:
         return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
 
 
@@ -1813,65 +2128,85 @@ def __init__(self, api_client):
 
     def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Create dashboard.
-        
+
         Create a draft dashboard.
-        
+
         :param dashboard: :class:`Dashboard` (optional)
-        
+
         :returns: :class:`Dashboard`
         """
         body = dashboard.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/lakeview/dashboards", body=body, headers=headers)
         return Dashboard.from_dict(res)
 
     def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = None) -> Schedule:
         """Create dashboard schedule.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule: :class:`Schedule` (optional)
-        
+
         :returns: :class:`Schedule`
         """
         body = schedule.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules",
+            body=body,
+            headers=headers,
+        )
         return Schedule.from_dict(res)
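# Editor's note: illustrative sketch (not part of the patch). The w.lakeview accessor and the
# CronSchedule field names (quartz_cron_expression, timezone_id) are assumptions here, not
# confirmed by this diff.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
schedule = w.lakeview.create_schedule(
    dashboard_id="...",
    schedule=Schedule(
        cron_schedule=CronSchedule(quartz_cron_expression="0 0 8 * * ?", timezone_id="UTC"),
        pause_status=SchedulePauseStatus.UNPAUSED,
    ),
)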
 
-    def create_subscription(self,
-                            dashboard_id: str,
-                            schedule_id: str,
-                            *,
-                            subscription: Optional[Subscription] = None) -> Subscription:
+    def create_subscription(
+        self,
+        dashboard_id: str,
+        schedule_id: str,
+        *,
+        subscription: Optional[Subscription] = None,
+    ) -> Subscription:
         """Create schedule subscription.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
         :param subscription: :class:`Subscription` (optional)
-        
+
         :returns: :class:`Subscription`
         """
         body = subscription.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions',
+            "POST",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return Subscription.from_dict(res)
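# Editor's note: illustrative sketch (not part of the patch); the w.lakeview accessor name and
# the numeric user_id are assumptions. A Subscriber targets either a notification destination
# or a workspace user.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
sub = w.lakeview.create_subscription(
    dashboard_id="...",
    schedule_id="...",
    subscription=Subscription(
        subscriber=Subscriber(user_subscriber=SubscriptionSubscriberUser(user_id=12345))
    ),
)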
 
-    def delete_schedule(self, dashboard_id: str, schedule_id: str, *, etag: Optional[str] = None):
+    def delete_schedule(
+        self,
+        dashboard_id: str,
+        schedule_id: str,
+        *,
+        etag: Optional[str] = None,
+    ):
         """Delete dashboard schedule.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
@@ -1879,27 +2214,34 @@ def delete_schedule(self, dashboard_id: str, schedule_id: str, *, etag: Optional
         :param etag: str (optional)
           The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been
           modified from its last retrieval.
-        
-        
+
+
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE',
-                     f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}',
-                     query=query,
-                     headers=headers)
-
-    def delete_subscription(self,
-                            dashboard_id: str,
-                            schedule_id: str,
-                            subscription_id: str,
-                            *,
-                            etag: Optional[str] = None):
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}",
+            query=query,
+            headers=headers,
+        )
+
+    def delete_subscription(
+        self,
+        dashboard_id: str,
+        schedule_id: str,
+        subscription_id: str,
+        *,
+        etag: Optional[str] = None,
+    ):
         """Delete schedule subscription.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
@@ -1909,99 +2251,124 @@ def delete_subscription(self,
         :param etag: str (optional)
           The etag for the subscription. Can be optionally provided to ensure that the subscription has not
           been modified since the last read.
-        
-        
+
+
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}',
+            "DELETE",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
 
     def get(self, dashboard_id: str) -> Dashboard:
         """Get dashboard.
-        
+
         Get a draft dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard.
-        
+
         :returns: :class:`Dashboard`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/lakeview/dashboards/{dashboard_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}",
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
 
     def get_published(self, dashboard_id: str) -> PublishedDashboard:
         """Get published dashboard.
-        
+
         Get the current published dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the published dashboard.
-        
+
         :returns: :class:`PublishedDashboard`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/published",
+            headers=headers,
+        )
         return PublishedDashboard.from_dict(res)
 
     def get_schedule(self, dashboard_id: str, schedule_id: str) -> Schedule:
         """Get dashboard schedule.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
-        
+
         :returns: :class:`Schedule`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}",
+            headers=headers,
+        )
         return Schedule.from_dict(res)
 
     def get_subscription(self, dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription:
         """Get schedule subscription.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
         :param subscription_id: str
           UUID identifying the subscription.
-        
+
         :returns: :class:`Subscription`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}",
+            headers=headers,
+        )
         return Subscription.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None,
-             show_trashed: Optional[bool] = None,
-             view: Optional[DashboardView] = None) -> Iterator[Dashboard]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        show_trashed: Optional[bool] = None,
+        view: Optional[DashboardView] = None,
+    ) -> Iterator[Dashboard]:
         """List dashboards.
-        
+
         :param page_size: int (optional)
           The number of dashboards to return per page.
         :param page_token: str (optional)
@@ -2012,33 +2379,46 @@ def list(self,
           returned.
         :param view: :class:`DashboardView` (optional)
           `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
-        
+
         :returns: Iterator over :class:`Dashboard`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if show_trashed is not None: query['show_trashed'] = show_trashed
-        if view is not None: query['view'] = view.value
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if show_trashed is not None:
+            query["show_trashed"] = show_trashed
+        if view is not None:
+            query["view"] = view.value
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/lakeview/dashboards', query=query, headers=headers)
-            if 'dashboards' in json:
-                for v in json['dashboards']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/lakeview/dashboards",
+                query=query,
+                headers=headers,
+            )
+            if "dashboards" in json:
+                for v in json["dashboards"]:
                     yield Dashboard.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_schedules(self,
-                       dashboard_id: str,
-                       *,
-                       page_size: Optional[int] = None,
-                       page_token: Optional[str] = None) -> Iterator[Schedule]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_schedules(
+        self,
+        dashboard_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Schedule]:
         """List dashboard schedules.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedules belongs.
         :param page_size: int (optional)
@@ -2046,35 +2426,43 @@ def list_schedules(self,
         :param page_token: str (optional)
           A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
           page.
-        
+
         :returns: Iterator over :class:`Schedule`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules',
-                                query=query,
-                                headers=headers)
-            if 'schedules' in json:
-                for v in json['schedules']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules",
+                query=query,
+                headers=headers,
+            )
+            if "schedules" in json:
+                for v in json["schedules"]:
                     yield Schedule.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_subscriptions(self,
-                           dashboard_id: str,
-                           schedule_id: str,
-                           *,
-                           page_size: Optional[int] = None,
-                           page_token: Optional[str] = None) -> Iterator[Subscription]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_subscriptions(
+        self,
+        dashboard_id: str,
+        schedule_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Subscription]:
         """List schedule subscriptions.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
@@ -2084,38 +2472,45 @@ def list_subscriptions(self,
         :param page_token: str (optional)
           A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
           page.
-        
+
         :returns: Iterator over :class:`Subscription`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
             json = self._api.do(
-                'GET',
-                f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions',
+                "GET",
+                f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions",
                 query=query,
-                headers=headers)
-            if 'subscriptions' in json:
-                for v in json['subscriptions']:
+                headers=headers,
+            )
+            if "subscriptions" in json:
+                for v in json["subscriptions"]:
                     yield Subscription.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def migrate(self,
-                source_dashboard_id: str,
-                *,
-                display_name: Optional[str] = None,
-                parent_path: Optional[str] = None,
-                update_parameter_syntax: Optional[bool] = None) -> Dashboard:
+            query["page_token"] = json["next_page_token"]
+
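A short sketch of walking schedules and their subscriptions for one dashboard; the `dashboard_id` value is a placeholder, and the `schedule_id`/`subscription_id` field names are assumed from the corresponding dataclasses:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    dashboard_id = "..."  # placeholder: UUID of an existing Lakeview dashboard

    for schedule in w.lakeview.list_schedules(dashboard_id):
        print("schedule:", schedule.schedule_id)
        for sub in w.lakeview.list_subscriptions(dashboard_id, schedule.schedule_id):
            print("  subscription:", sub.subscription_id)
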
+    def migrate(
+        self,
+        source_dashboard_id: str,
+        *,
+        display_name: Optional[str] = None,
+        parent_path: Optional[str] = None,
+        update_parameter_syntax: Optional[bool] = None,
+    ) -> Dashboard:
         """Migrate dashboard.
-        
+
         Migrates a classic SQL dashboard to Lakeview.
-        
+
         :param source_dashboard_id: str
           UUID of the dashboard to be migrated.
         :param display_name: str (optional)
@@ -2125,28 +2520,42 @@ def migrate(self,
         :param update_parameter_syntax: bool (optional)
           Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
           (:param) when converting datasets in the dashboard.
-        
+
         :returns: :class:`Dashboard`
         """
         body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if parent_path is not None: body['parent_path'] = parent_path
-        if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id
-        if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if display_name is not None:
+            body["display_name"] = display_name
+        if parent_path is not None:
+            body["parent_path"] = parent_path
+        if source_dashboard_id is not None:
+            body["source_dashboard_id"] = source_dashboard_id
+        if update_parameter_syntax is not None:
+            body["update_parameter_syntax"] = update_parameter_syntax
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/lakeview/dashboards/migrate', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/lakeview/dashboards/migrate",
+            body=body,
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
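
A hedged sketch of a migration call; the source dashboard ID, display name, and target `parent_path` are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    migrated = w.lakeview.migrate(
        source_dashboard_id="...",               # placeholder: classic SQL dashboard UUID
        display_name="Sales (Lakeview)",         # hypothetical new display name
        parent_path="/Workspace/Users/me@example.com",  # hypothetical destination folder
        update_parameter_syntax=True,            # rewrite {{ param }} to :param in datasets
    )
    print(migrated.dashboard_id)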
 
-    def publish(self,
-                dashboard_id: str,
-                *,
-                embed_credentials: Optional[bool] = None,
-                warehouse_id: Optional[str] = None) -> PublishedDashboard:
+    def publish(
+        self,
+        dashboard_id: str,
+        *,
+        embed_credentials: Optional[bool] = None,
+        warehouse_id: Optional[str] = None,
+    ) -> PublishedDashboard:
         """Publish dashboard.
-        
+
         Publish the current draft dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to be published.
         :param embed_credentials: bool (optional)
@@ -2154,92 +2563,123 @@ def publish(self,
           embedded credentials will be used to execute the published dashboard's queries.
         :param warehouse_id: str (optional)
           The ID of the warehouse that can be used to override the warehouse which was set in the draft.
-        
+
         :returns: :class:`PublishedDashboard`
         """
         body = {}
-        if embed_credentials is not None: body['embed_credentials'] = embed_credentials
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if embed_credentials is not None:
+            body["embed_credentials"] = embed_credentials
+        if warehouse_id is not None:
+            body["warehouse_id"] = warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/lakeview/dashboards/{dashboard_id}/published',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/published",
+            body=body,
+            headers=headers,
+        )
         return PublishedDashboard.from_dict(res)
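
A minimal publish sketch; both IDs are placeholders, and `as_dict()` is used only to avoid assuming specific response fields:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    published = w.lakeview.publish(
        dashboard_id="...",      # placeholder: draft dashboard UUID
        embed_credentials=True,  # run published queries with the publisher's credentials
        warehouse_id="...",      # placeholder: optional warehouse override
    )
    print(published.as_dict())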
 
     def trash(self, dashboard_id: str):
         """Trash dashboard.
-        
+
         Trash a dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}",
+            headers=headers,
+        )
 
     def unpublish(self, dashboard_id: str):
         """Unpublish dashboard.
-        
+
         Unpublish the dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the published dashboard.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/published",
+            headers=headers,
+        )
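
Taken together, unpublish and trash give a simple teardown sequence; the ID is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    dashboard_id = "..."  # placeholder

    w.lakeview.unpublish(dashboard_id)  # remove the published copy first
    w.lakeview.trash(dashboard_id)      # then move the draft to the trash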
 
     def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Update dashboard.
-        
+
         Update a draft dashboard.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard.
         :param dashboard: :class:`Dashboard` (optional)
-        
+
         :returns: :class:`Dashboard`
         """
         body = dashboard.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.0/lakeview/dashboards/{dashboard_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}",
+            body=body,
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
 
-    def update_schedule(self,
-                        dashboard_id: str,
-                        schedule_id: str,
-                        *,
-                        schedule: Optional[Schedule] = None) -> Schedule:
+    def update_schedule(
+        self,
+        dashboard_id: str,
+        schedule_id: str,
+        *,
+        schedule: Optional[Schedule] = None,
+    ) -> Schedule:
         """Update dashboard schedule.
-        
+
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
         :param schedule: :class:`Schedule` (optional)
-        
+
         :returns: :class:`Schedule`
         """
         body = schedule.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}",
+            body=body,
+            headers=headers,
+        )
         return Schedule.from_dict(res)
 
 
@@ -2251,20 +2691,24 @@ def __init__(self, api_client):
 
     def get_published_dashboard_embedded(self, dashboard_id: str):
         """Read a published dashboard in an embedded ui.
-        
+
         Get the current published dashboard within an embedded context.
-        
+
         :param dashboard_id: str
           UUID identifying the published dashboard.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('GET',
-                     f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded',
-                     headers=headers)
+        self._api.do(
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded",
+            headers=headers,
+        )
 
 
 class QueryExecutionAPI:
@@ -2273,37 +2717,51 @@ class QueryExecutionAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def cancel_published_query_execution(self,
-                                         dashboard_name: str,
-                                         dashboard_revision_id: str,
-                                         *,
-                                         tokens: Optional[List[str]] = None) -> CancelQueryExecutionResponse:
+    def cancel_published_query_execution(
+        self,
+        dashboard_name: str,
+        dashboard_revision_id: str,
+        *,
+        tokens: Optional[List[str]] = None,
+    ) -> CancelQueryExecutionResponse:
         """Cancel the results for a query for a published, embedded dashboard.
-        
+
         :param dashboard_name: str
         :param dashboard_revision_id: str
         :param tokens: List[str] (optional)
           Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-        
+
         :returns: :class:`CancelQueryExecutionResponse`
         """
 
         query = {}
-        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
-        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
-        if tokens is not None: query['tokens'] = [v for v in tokens]
-        headers = {'Accept': 'application/json', }
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/lakeview-query/query/published",
+            query=query,
+            headers=headers,
+        )
         return CancelQueryExecutionResponse.from_dict(res)
 
-    def execute_published_dashboard_query(self,
-                                          dashboard_name: str,
-                                          dashboard_revision_id: str,
-                                          *,
-                                          override_warehouse_id: Optional[str] = None):
+    def execute_published_dashboard_query(
+        self,
+        dashboard_name: str,
+        dashboard_revision_id: str,
+        *,
+        override_warehouse_id: Optional[str] = None,
+    ):
         """Execute a query for a published dashboard.
-        
+
         :param dashboard_name: str
           Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel which contains the
           list of datasets, warehouse_id, and embedded_credentials
@@ -2311,37 +2769,60 @@ def execute_published_dashboard_query(self,
         :param override_warehouse_id: str (optional)
           A dashboard schedule can override the warehouse used as compute for processing the published
           dashboard queries
-        
-        
+
+
         """
         body = {}
-        if dashboard_name is not None: body['dashboard_name'] = dashboard_name
-        if dashboard_revision_id is not None: body['dashboard_revision_id'] = dashboard_revision_id
-        if override_warehouse_id is not None: body['override_warehouse_id'] = override_warehouse_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if dashboard_name is not None:
+            body["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            body["dashboard_revision_id"] = dashboard_revision_id
+        if override_warehouse_id is not None:
+            body["override_warehouse_id"] = override_warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/lakeview-query/query/published', body=body, headers=headers)
-
-    def poll_published_query_status(self,
-                                    dashboard_name: str,
-                                    dashboard_revision_id: str,
-                                    *,
-                                    tokens: Optional[List[str]] = None) -> PollQueryStatusResponse:
+        self._api.do(
+            "POST",
+            "/api/2.0/lakeview-query/query/published",
+            body=body,
+            headers=headers,
+        )
+
+    def poll_published_query_status(
+        self,
+        dashboard_name: str,
+        dashboard_revision_id: str,
+        *,
+        tokens: Optional[List[str]] = None,
+    ) -> PollQueryStatusResponse:
         """Poll the results for a query for a published, embedded dashboard.
-        
+
         :param dashboard_name: str
         :param dashboard_revision_id: str
         :param tokens: List[str] (optional)
           Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-        
+
         :returns: :class:`PollQueryStatusResponse`
         """
 
         query = {}
-        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
-        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
-        if tokens is not None: query['tokens'] = [v for v in tokens]
-        headers = {'Accept': 'application/json', }
+        if dashboard_name is not None:
+            query["dashboard_name"] = dashboard_name
+        if dashboard_revision_id is not None:
+            query["dashboard_revision_id"] = dashboard_revision_id
+        if tokens is not None:
+            query["tokens"] = [v for v in tokens]
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/lakeview-query/query/published",
+            query=query,
+            headers=headers,
+        )
         return PollQueryStatusResponse.from_dict(res)
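
A rough sketch of the execute-then-poll flow for a published, embedded dashboard. The accessor name `w.query_execution`, the bounded polling loop, and inspecting the response via `as_dict()` are all assumptions layered on top of the generated methods above:

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    name, revision = "...", "..."  # placeholders: dashboard name and revision ID

    w.query_execution.execute_published_dashboard_query(name, revision)
    for _ in range(10):  # bounded polling; a real caller would stop on a terminal status
        status = w.query_execution.poll_published_query_status(name, revision)
        print(status.as_dict())
        time.sleep(2)
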
diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py
index 99c252298..ff892d85d 100755
--- a/databricks/sdk/service/files.py
+++ b/databricks/sdk/service/files.py
@@ -8,7 +8,7 @@
 
 from ._internal import _escape_multi_segment_path_parameter, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -24,21 +24,25 @@ class AddBlock:
     def as_dict(self) -> dict:
         """Serializes the AddBlock into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data is not None: body['data'] = self.data
-        if self.handle is not None: body['handle'] = self.handle
+        if self.data is not None:
+            body["data"] = self.data
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AddBlock into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data is not None: body['data'] = self.data
-        if self.handle is not None: body['handle'] = self.handle
+        if self.data is not None:
+            body["data"] = self.data
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddBlock:
         """Deserializes the AddBlock from a dictionary."""
-        return cls(data=d.get('data', None), handle=d.get('handle', None))
+        return cls(data=d.get("data", None), handle=d.get("handle", None))
 
 
 @dataclass
@@ -68,19 +72,21 @@ class Close:
     def as_dict(self) -> dict:
         """Serializes the Close into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.handle is not None: body['handle'] = self.handle
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Close into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.handle is not None: body['handle'] = self.handle
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Close:
         """Deserializes the Close from a dictionary."""
-        return cls(handle=d.get('handle', None))
+        return cls(handle=d.get("handle", None))
 
 
 @dataclass
@@ -113,21 +119,25 @@ class Create:
     def as_dict(self) -> dict:
         """Serializes the Create into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Create into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Create:
         """Deserializes the Create from a dictionary."""
-        return cls(overwrite=d.get('overwrite', None), path=d.get('path', None))
+        return cls(overwrite=d.get("overwrite", None), path=d.get("path", None))
 
 
 @dataclass
@@ -158,19 +168,21 @@ class CreateResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.handle is not None: body['handle'] = self.handle
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.handle is not None: body['handle'] = self.handle
+        if self.handle is not None:
+            body["handle"] = self.handle
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
-        return cls(handle=d.get('handle', None))
+        return cls(handle=d.get("handle", None))
 
 
 @dataclass
@@ -185,21 +197,25 @@ class Delete:
     def as_dict(self) -> dict:
         """Serializes the Delete into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.recursive is not None: body['recursive'] = self.recursive
+        if self.path is not None:
+            body["path"] = self.path
+        if self.recursive is not None:
+            body["recursive"] = self.recursive
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Delete into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.recursive is not None: body['recursive'] = self.recursive
+        if self.path is not None:
+            body["path"] = self.path
+        if self.recursive is not None:
+            body["recursive"] = self.recursive
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
-        return cls(path=d.get('path', None), recursive=d.get('recursive', None))
+        return cls(path=d.get("path", None), recursive=d.get("recursive", None))
 
 
 @dataclass
@@ -260,31 +276,43 @@ class DirectoryEntry:
     def as_dict(self) -> dict:
         """Serializes the DirectoryEntry into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_directory is not None: body['is_directory'] = self.is_directory
-        if self.last_modified is not None: body['last_modified'] = self.last_modified
-        if self.name is not None: body['name'] = self.name
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_directory is not None:
+            body["is_directory"] = self.is_directory
+        if self.last_modified is not None:
+            body["last_modified"] = self.last_modified
+        if self.name is not None:
+            body["name"] = self.name
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_directory is not None: body['is_directory'] = self.is_directory
-        if self.last_modified is not None: body['last_modified'] = self.last_modified
-        if self.name is not None: body['name'] = self.name
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_directory is not None:
+            body["is_directory"] = self.is_directory
+        if self.last_modified is not None:
+            body["last_modified"] = self.last_modified
+        if self.name is not None:
+            body["name"] = self.name
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectoryEntry:
         """Deserializes the DirectoryEntry from a dictionary."""
-        return cls(file_size=d.get('file_size', None),
-                   is_directory=d.get('is_directory', None),
-                   last_modified=d.get('last_modified', None),
-                   name=d.get('name', None),
-                   path=d.get('path', None))
+        return cls(
+            file_size=d.get("file_size", None),
+            is_directory=d.get("is_directory", None),
+            last_modified=d.get("last_modified", None),
+            name=d.get("name", None),
+            path=d.get("path", None),
+        )
 
 
 @dataclass
@@ -300,28 +328,38 @@ class DownloadResponse:
     def as_dict(self) -> dict:
         """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content_length is not None: body['content-length'] = self.content_length
-        if self.content_type is not None: body['content-type'] = self.content_type
-        if self.contents: body['contents'] = self.contents
-        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        if self.content_length is not None:
+            body["content-length"] = self.content_length
+        if self.content_type is not None:
+            body["content-type"] = self.content_type
+        if self.contents:
+            body["contents"] = self.contents
+        if self.last_modified is not None:
+            body["last-modified"] = self.last_modified
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content_length is not None: body['content-length'] = self.content_length
-        if self.content_type is not None: body['content-type'] = self.content_type
-        if self.contents: body['contents'] = self.contents
-        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        if self.content_length is not None:
+            body["content-length"] = self.content_length
+        if self.content_type is not None:
+            body["content-type"] = self.content_type
+        if self.contents:
+            body["contents"] = self.contents
+        if self.last_modified is not None:
+            body["last-modified"] = self.last_modified
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
-        return cls(content_length=int(d.get('content-length', None)),
-                   content_type=d.get('content-type', None),
-                   contents=d.get('contents', None),
-                   last_modified=d.get('last-modified', None))
+        return cls(
+            content_length=int(d.get("content-length", None)),
+            content_type=d.get("content-type", None),
+            contents=d.get("contents", None),
+            last_modified=d.get("last-modified", None),
+        )
 
 
 @dataclass
@@ -341,28 +379,38 @@ class FileInfo:
     def as_dict(self) -> dict:
         """Serializes the FileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_dir is not None: body['is_dir'] = self.is_dir
-        if self.modification_time is not None: body['modification_time'] = self.modification_time
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_dir is not None:
+            body["is_dir"] = self.is_dir
+        if self.modification_time is not None:
+            body["modification_time"] = self.modification_time
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_dir is not None: body['is_dir'] = self.is_dir
-        if self.modification_time is not None: body['modification_time'] = self.modification_time
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_dir is not None:
+            body["is_dir"] = self.is_dir
+        if self.modification_time is not None:
+            body["modification_time"] = self.modification_time
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
-        return cls(file_size=d.get('file_size', None),
-                   is_dir=d.get('is_dir', None),
-                   modification_time=d.get('modification_time', None),
-                   path=d.get('path', None))
+        return cls(
+            file_size=d.get("file_size", None),
+            is_dir=d.get("is_dir", None),
+            modification_time=d.get("modification_time", None),
+            path=d.get("path", None),
+        )
 
 
 @dataclass
@@ -395,25 +443,33 @@ class GetMetadataResponse:
     def as_dict(self) -> dict:
         """Serializes the GetMetadataResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content_length is not None: body['content-length'] = self.content_length
-        if self.content_type is not None: body['content-type'] = self.content_type
-        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        if self.content_length is not None:
+            body["content-length"] = self.content_length
+        if self.content_type is not None:
+            body["content-type"] = self.content_type
+        if self.last_modified is not None:
+            body["last-modified"] = self.last_modified
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content_length is not None: body['content-length'] = self.content_length
-        if self.content_type is not None: body['content-type'] = self.content_type
-        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        if self.content_length is not None:
+            body["content-length"] = self.content_length
+        if self.content_type is not None:
+            body["content-type"] = self.content_type
+        if self.last_modified is not None:
+            body["last-modified"] = self.last_modified
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetadataResponse:
         """Deserializes the GetMetadataResponse from a dictionary."""
-        return cls(content_length=int(d.get('content-length', None)),
-                   content_type=d.get('content-type', None),
-                   last_modified=d.get('last-modified', None))
+        return cls(
+            content_length=int(d.get("content-length", None)),
+            content_type=d.get("content-type", None),
+            last_modified=d.get("last-modified", None),
+        )
 
 
 @dataclass
@@ -427,22 +483,28 @@ class ListDirectoryResponse:
     def as_dict(self) -> dict:
         """Serializes the ListDirectoryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents: body['contents'] = [v.as_dict() for v in self.contents]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.contents:
+            body["contents"] = [v.as_dict() for v in self.contents]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents: body['contents'] = self.contents
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.contents:
+            body["contents"] = self.contents
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDirectoryResponse:
         """Deserializes the ListDirectoryResponse from a dictionary."""
-        return cls(contents=_repeated_dict(d, 'contents', DirectoryEntry),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            contents=_repeated_dict(d, "contents", DirectoryEntry),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -453,19 +515,21 @@ class ListStatusResponse:
     def as_dict(self) -> dict:
         """Serializes the ListStatusResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.files: body['files'] = [v.as_dict() for v in self.files]
+        if self.files:
+            body["files"] = [v.as_dict() for v in self.files]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.files: body['files'] = self.files
+        if self.files:
+            body["files"] = self.files
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStatusResponse:
         """Deserializes the ListStatusResponse from a dictionary."""
-        return cls(files=_repeated_dict(d, 'files', FileInfo))
+        return cls(files=_repeated_dict(d, "files", FileInfo))
 
 
 @dataclass
@@ -476,19 +540,21 @@ class MkDirs:
     def as_dict(self) -> dict:
         """Serializes the MkDirs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MkDirs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkDirs:
         """Deserializes the MkDirs from a dictionary."""
-        return cls(path=d.get('path', None))
+        return cls(path=d.get("path", None))
 
 
 @dataclass
@@ -521,21 +587,28 @@ class Move:
     def as_dict(self) -> dict:
         """Serializes the Move into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_path is not None: body['destination_path'] = self.destination_path
-        if self.source_path is not None: body['source_path'] = self.source_path
+        if self.destination_path is not None:
+            body["destination_path"] = self.destination_path
+        if self.source_path is not None:
+            body["source_path"] = self.source_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Move into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_path is not None: body['destination_path'] = self.destination_path
-        if self.source_path is not None: body['source_path'] = self.source_path
+        if self.destination_path is not None:
+            body["destination_path"] = self.destination_path
+        if self.source_path is not None:
+            body["source_path"] = self.source_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Move:
         """Deserializes the Move from a dictionary."""
-        return cls(destination_path=d.get('destination_path', None), source_path=d.get('source_path', None))
+        return cls(
+            destination_path=d.get("destination_path", None),
+            source_path=d.get("source_path", None),
+        )
 
 
 @dataclass
@@ -571,25 +644,33 @@ class Put:
     def as_dict(self) -> dict:
         """Serializes the Put into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents is not None: body['contents'] = self.contents
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.contents is not None:
+            body["contents"] = self.contents
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Put into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents is not None: body['contents'] = self.contents
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.contents is not None:
+            body["contents"] = self.contents
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Put:
         """Deserializes the Put from a dictionary."""
-        return cls(contents=d.get('contents', None),
-                   overwrite=d.get('overwrite', None),
-                   path=d.get('path', None))
+        return cls(
+            contents=d.get("contents", None),
+            overwrite=d.get("overwrite", None),
+            path=d.get("path", None),
+        )
 
 
 @dataclass
@@ -623,21 +704,25 @@ class ReadResponse:
     def as_dict(self) -> dict:
         """Serializes the ReadResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bytes_read is not None: body['bytes_read'] = self.bytes_read
-        if self.data is not None: body['data'] = self.data
+        if self.bytes_read is not None:
+            body["bytes_read"] = self.bytes_read
+        if self.data is not None:
+            body["data"] = self.data
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ReadResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bytes_read is not None: body['bytes_read'] = self.bytes_read
-        if self.data is not None: body['data'] = self.data
+        if self.bytes_read is not None:
+            body["bytes_read"] = self.bytes_read
+        if self.data is not None:
+            body["data"] = self.data
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReadResponse:
         """Deserializes the ReadResponse from a dictionary."""
-        return cls(bytes_read=d.get('bytes_read', None), data=d.get('data', None))
+        return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None))
 
 
 @dataclass
@@ -668,232 +753,284 @@ def __init__(self, api_client):
 
     def add_block(self, handle: int, data: str):
         """Append data block.
-        
+
         Appends a block of data to the stream specified by the input handle. If the handle does not exist,
         this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
-        
+
         If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
-        
+
         :param handle: int
           The handle on an open stream.
         :param data: str
           The base64-encoded data to append to the stream. This has a limit of 1 MB.
-        
-        
+
+
         """
         body = {}
-        if data is not None: body['data'] = data
-        if handle is not None: body['handle'] = handle
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if data is not None:
+            body["data"] = data
+        if handle is not None:
+            body["handle"] = handle
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/dbfs/add-block', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/dbfs/add-block", body=body, headers=headers)
 
     def close(self, handle: int):
         """Close the stream.
-        
+
         Closes the stream specified by the input handle. If the handle does not exist, this call throws an
         exception with ``RESOURCE_DOES_NOT_EXIST``.
-        
+
         :param handle: int
           The handle on an open stream.
-        
-        
+
+
         """
         body = {}
-        if handle is not None: body['handle'] = handle
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if handle is not None:
+            body["handle"] = handle
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/dbfs/close', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/dbfs/close", body=body, headers=headers)
 
     def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateResponse:
         """Open a stream.
-        
+
         Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
         timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
         set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.
-        
+
         A typical workflow for file upload would be:
-        
+
         1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle
         you have. 3. Issue a ``close`` call with the handle you have.
-        
+
         :param path: str
           The path of the new file. The path should be the absolute DBFS path.
         :param overwrite: bool (optional)
           The flag that specifies whether to overwrite existing file/files.
-        
+
         :returns: :class:`CreateResponse`
         """
         body = {}
-        if overwrite is not None: body['overwrite'] = overwrite
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/dbfs/create', body=body, headers=headers)
+        if overwrite is not None:
+            body["overwrite"] = overwrite
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/dbfs/create", body=body, headers=headers)
         return CreateResponse.from_dict(res)
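
To illustrate the create / add-block / close workflow described above, a sketch that streams a local file to DBFS in base64-encoded chunks, keeping each block well under the 1 MB limit; it assumes `w.dbfs` exposes these generated methods and that `example.bin` exists locally:

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    handle = w.dbfs.create("/tmp/example.bin", overwrite=True).handle
    try:
        with open("example.bin", "rb") as f:
            while chunk := f.read(512 * 1024):  # 0.5 MB per add_block call
                w.dbfs.add_block(handle, base64.b64encode(chunk).decode())
    finally:
        w.dbfs.close(handle)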
 
     def delete(self, path: str, *, recursive: Optional[bool] = None):
         """Delete a file/directory.
-        
+
         Delete the file or directory (optionally recursively delete all files in the directory). This call
         throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to
         `false` or on other similar errors.
-        
+
         When you delete a large number of files, the delete operation is done in increments. The call returns
         a response after approximately 45 seconds with an error message (503 Service Unavailable) asking you
         to re-invoke the delete operation until the directory structure is fully deleted.
-        
+
         For operations that delete more than 10K files, we discourage using the DBFS REST API, but advise you
         to perform such operations in the context of a cluster, using the [File system utility
         (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs). `dbutils.fs` covers the functional scope
         of the DBFS REST API, but from notebooks. Running such operations using notebooks provides better
         control and manageability, such as selective deletes, and the possibility to automate periodic delete
         jobs.
-        
+
         :param path: str
           The path of the file or directory to delete. The path should be the absolute DBFS path.
         :param recursive: bool (optional)
           Whether or not to recursively delete the directory's contents. Deleting empty directories can be
           done without providing the recursive flag.
-        
-        
+
+
         """
         body = {}
-        if path is not None: body['path'] = path
-        if recursive is not None: body['recursive'] = recursive
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if path is not None:
+            body["path"] = path
+        if recursive is not None:
+            body["recursive"] = recursive
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/dbfs/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/dbfs/delete", body=body, headers=headers)
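
Since large recursive deletes proceed incrementally and ask to be re-invoked until the tree is gone, a coarse bounded retry loop like the following is one way to drive that; inspecting the specific error code is deliberately left out of this sketch:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import DatabricksError

    w = WorkspaceClient()

    for _ in range(20):  # bounded number of re-invocations
        try:
            w.dbfs.delete("/tmp/large-dir", recursive=True)
            break  # the call returned without an error, so the tree is gone
        except DatabricksError:
            time.sleep(30)  # wait, then re-invoke as the docstring suggests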
 
     def get_status(self, path: str) -> FileInfo:
         """Get the information of a file or directory.
-        
+
         Gets the file information for a file or directory. If the file or directory does not exist, this call
         throws an exception with `RESOURCE_DOES_NOT_EXIST`.
-        
+
         :param path: str
           The path of the file or directory. The path should be the absolute DBFS path.
-        
+
         :returns: :class:`FileInfo`
         """
 
         query = {}
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/dbfs/get-status', query=query, headers=headers)
+        res = self._api.do("GET", "/api/2.0/dbfs/get-status", query=query, headers=headers)
         return FileInfo.from_dict(res)
 
     def list(self, path: str) -> Iterator[FileInfo]:
         """List directory contents or file details.
-        
+
         List the contents of a directory, or details of the file. If the file or directory does not exist,
         this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
-        
+
         When calling list on a large directory, the list operation will time out after approximately 60
         seconds. We strongly recommend using list only on directories containing less than 10K files and
         discourage using the DBFS REST API for operations that list more than 10K files. Instead, we recommend
         that you perform such operations in the context of a cluster, using the [File system utility
         (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs), which provides the same functionality
         without timing out.
-        
+
         :param path: str
           The path of the file or directory. The path should be the absolute DBFS path.
-        
+
         :returns: Iterator over :class:`FileInfo`
         """
 
         query = {}
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/dbfs/list', query=query, headers=headers)
+        json = self._api.do("GET", "/api/2.0/dbfs/list", query=query, headers=headers)
         parsed = ListStatusResponse.from_dict(json).files
         return parsed if parsed is not None else []
 
     def mkdirs(self, path: str):
         """Create a directory.
-        
+
         Creates the given directory and necessary parent directories if they do not exist. If a file (not a
         directory) exists at any prefix of the input path, this call throws an exception with
         `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some
         of the necessary parent directories.
-        
+
         :param path: str
           The path of the new directory. The path should be the absolute DBFS path.
-        
-        
+
+
         """
         body = {}
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/dbfs/mkdirs', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/dbfs/mkdirs", body=body, headers=headers)
 
     def move(self, source_path: str, destination_path: str):
         """Move a file.
-        
+
         Moves a file from one location to another location within DBFS. If the source file does not exist,
         this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
         destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
         path is a directory, this call always recursively moves all files.
-        
+
         :param source_path: str
           The source path of the file or directory. The path should be the absolute DBFS path.
         :param destination_path: str
           The destination path of the file or directory. The path should be the absolute DBFS path.
-        
-        
-        """
-        body = {}
-        if destination_path is not None: body['destination_path'] = destination_path
-        if source_path is not None: body['source_path'] = source_path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/dbfs/move', body=body, headers=headers)
 
-    def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[bool] = None):
+        """
+        body = {}
+        if destination_path is not None:
+            body["destination_path"] = destination_path
+        if source_path is not None:
+            body["source_path"] = source_path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/dbfs/move", body=body, headers=headers)
+
+    def put(
+        self,
+        path: str,
+        *,
+        contents: Optional[str] = None,
+        overwrite: Optional[bool] = None,
+    ):
         """Upload a file.
-        
+
         Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
         can also be used as a convenient single call for data upload.
-        
+
         Alternatively, you can pass contents as a base64 string.
-        
+
         The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited
         to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded.
-        
+
         If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create,
         :method:dbfs/addBlock, :method:dbfs/close.
-        
+
         :param path: str
           The path of the new file. The path should be the absolute DBFS path.
         :param contents: str (optional)
           This parameter might be absent, and instead a posted file will be used.
         :param overwrite: bool (optional)
           The flag that specifies whether to overwrite existing file/files.
-        
-        
-        """
-        body = {}
-        if contents is not None: body['contents'] = contents
-        if overwrite is not None: body['overwrite'] = overwrite
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/dbfs/put', body=body, headers=headers)
 
-    def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse:
+        """
+        body = {}
+        if contents is not None:
+            body["contents"] = contents
+        if overwrite is not None:
+            body["overwrite"] = overwrite
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/dbfs/put", body=body, headers=headers)
+
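For payloads up to the 1 MB limit, a single `put` with base64-encoded contents is the simplest path; the target path is a placeholder:

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    w.dbfs.put(
        "/tmp/hello.txt",  # placeholder DBFS path
        contents=base64.b64encode(b"hello, dbfs\n").decode(),
        overwrite=True,
    )
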
+    def read(
+        self,
+        path: str,
+        *,
+        length: Optional[int] = None,
+        offset: Optional[int] = None,
+    ) -> ReadResponse:
         """Get the contents of a file.
-        
+
         Returns the contents of a file. If the file does not exist, this call throws an exception with
         `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
         is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
         1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
-        
+
         If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of
         file.
-        
+
         :param path: str
           The path of the file to read. The path should be the absolute DBFS path.
         :param length: int (optional)
@@ -901,17 +1038,22 @@ def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int]
           of 0.5 MB.
         :param offset: int (optional)
           The offset to read from in bytes.
-        
+
         :returns: :class:`ReadResponse`
         """
 
         query = {}
-        if length is not None: query['length'] = length
-        if offset is not None: query['offset'] = offset
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/dbfs/read', query=query, headers=headers)
+        if length is not None:
+            query["length"] = length
+        if offset is not None:
+            query["offset"] = offset
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/dbfs/read", query=query, headers=headers)
         return ReadResponse.from_dict(res)
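A chunked-read sketch building on the offset/length parameters above; it assumes WorkspaceClient.dbfs exposes this read method and that ReadResponse carries the bytes_read and base64 data fields of the underlying DBFS REST API (the dataclass itself sits outside this hunk).

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Read 0.5 MB per request, advancing the offset until the server reports no more bytes.
    data, offset, chunk = bytearray(), 0, 512 * 1024
    while True:
        resp = w.dbfs.read("/tmp/example.txt", offset=offset, length=chunk)
        if not resp.bytes_read:
            break
        data.extend(base64.b64decode(resp.data))
        offset += resp.bytes_read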
 
 
@@ -919,163 +1061,188 @@ class FilesAPI:
     """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and
     directories by referring to their URI. The API makes working with file content as raw bytes easier and
     more efficient.
-    
+
     The API supports [Unity Catalog volumes], where files and directories to operate on are specified using
     their volume URI path, which follows the format
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
-    
+
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
     working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
     and DELETE to manage files and directories specified using their URI path. The path is always absolute.
-    
+
     Some Files API client features are currently experimental. To enable them, set
     `enable_experimental_files_api_client = True` in your configuration profile or use the environment
     variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
-    
-    [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html"""
+
+    [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
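Following the class docstring's note on experimental client features, a hedged sketch of opting in through the SDK configuration, assuming Config accepts enable_experimental_files_api_client as a keyword and WorkspaceClient accepts a prebuilt config object:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.core import Config

    # Opt into the experimental Files API client features mentioned in the docstring above;
    # setting DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True is the equivalent env route.
    cfg = Config(enable_experimental_files_api_client=True)
    w = WorkspaceClient(config=cfg)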
 
     def create_directory(self, directory_path: str):
         """Create a directory.
-        
+
         Creates an empty directory. If necessary, also creates any parent directories of the new, empty
         directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
         response; this method is idempotent (it will succeed if the directory already exists).
-        
+
         :param directory_path: str
           The absolute path of a directory.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('PUT',
-                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
-                     headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}",
+            headers=headers,
+        )
 
     def delete(self, file_path: str):
         """Delete a file.
-        
+
         Deletes a file. If the request is successful, there is no response body.
-        
+
         :param file_path: str
           The absolute path of the file.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}",
+            headers=headers,
+        )
 
     def delete_directory(self, directory_path: str):
         """Delete a directory.
-        
+
         Deletes an empty directory.
-        
+
         To delete a non-empty directory, first delete all of its contents. This can be done by listing the
         directory contents and deleting each file and subdirectory recursively.
-        
+
         :param directory_path: str
           The absolute path of a directory.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}",
+            headers=headers,
+        )
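The docstring above spells out how to handle non-empty directories; here is a recursive sketch of that procedure, assuming WorkspaceClient.files is the accessor and DirectoryEntry carries path and is_directory fields:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    def delete_recursively(path: str) -> None:
        # Delete children first (files directly, subdirectories recursively), then the empty directory.
        for entry in w.files.list_directory_contents(path):
            if entry.is_directory:
                delete_recursively(entry.path)
            else:
                w.files.delete(entry.path)
        w.files.delete_directory(path)

    delete_recursively("/Volumes/main/default/my_volume/tmp")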
 
     def download(self, file_path: str) -> DownloadResponse:
         """Download a file.
-        
+
         Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
         a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
-        
+
         :param file_path: str
           The absolute path of the file.
-        
+
         :returns: :class:`DownloadResponse`
         """
 
-        headers = {'Accept': 'application/octet-stream', }
-        response_headers = ['content-length', 'content-type', 'last-modified', ]
-        res = self._api.do('GET',
-                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
-                           headers=headers,
-                           response_headers=response_headers,
-                           raw=True)
+        headers = {
+            "Accept": "application/octet-stream",
+        }
+        response_headers = [
+            "content-length",
+            "content-type",
+            "last-modified",
+        ]
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}",
+            headers=headers,
+            response_headers=response_headers,
+            raw=True,
+        )
         return DownloadResponse.from_dict(res)
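A download sketch matching the octet-stream handling above; it assumes WorkspaceClient.files is the accessor and that DownloadResponse exposes the response body as a readable contents stream (the dataclass is defined outside this hunk).

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.files.download("/Volumes/main/default/my_volume/report.csv")
    # The body is raw bytes; stream it straight to a local file.
    with open("report.csv", "wb") as f:
        f.write(resp.contents.read())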
 
     def get_directory_metadata(self, directory_path: str):
         """Get directory metadata.
-        
+
         Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
         body.
-        
+
         This method is useful to check if a directory exists and the caller has access to it.
-        
+
         If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory
         if it does not exist, and is idempotent (it will succeed if the directory already exists).
-        
+
         :param directory_path: str
           The absolute path of a directory.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('HEAD',
-                     f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
-                     headers=headers)
+        self._api.do(
+            "HEAD",
+            f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}",
+            headers=headers,
+        )
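An existence-check sketch for the HEAD call above, assuming the SDK maps a 404 on this endpoint to databricks.sdk.errors.NotFound:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import NotFound

    w = WorkspaceClient()
    path = "/Volumes/main/default/my_volume/reports"
    try:
        # HEAD request: metadata comes back in headers, so a clean return means the directory exists.
        w.files.get_directory_metadata(path)
        exists = True
    except NotFound:
        exists = False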
 
     def get_metadata(self, file_path: str) -> GetMetadataResponse:
         """Get file metadata.
-        
+
         Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
-        
+
         :param file_path: str
           The absolute path of the file.
-        
+
         :returns: :class:`GetMetadataResponse`
         """
 
         headers = {}
-        response_headers = ['content-length', 'content-type', 'last-modified', ]
-        res = self._api.do('HEAD',
-                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
-                           headers=headers,
-                           response_headers=response_headers)
+        response_headers = [
+            "content-length",
+            "content-type",
+            "last-modified",
+        ]
+        res = self._api.do(
+            "HEAD",
+            f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}",
+            headers=headers,
+            response_headers=response_headers,
+        )
         return GetMetadataResponse.from_dict(res)
 
-    def list_directory_contents(self,
-                                directory_path: str,
-                                *,
-                                page_size: Optional[int] = None,
-                                page_token: Optional[str] = None) -> Iterator[DirectoryEntry]:
+    def list_directory_contents(
+        self,
+        directory_path: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[DirectoryEntry]:
         """List directory contents.
-        
+
         Returns the contents of a directory. If there is no directory at the specified path, the API returns a
         HTTP 404 error.
-        
+
         :param directory_path: str
           The absolute path of a directory.
         :param page_size: int (optional)
           The maximum number of directory entries to return. The response may contain fewer entries. If the
           response contains a `next_page_token`, there may be more entries, even if fewer than `page_size`
           entries are in the response.
-          
+
           We recommend not to set this value unless you are intentionally listing less than the complete
           directory contents.
-          
+
           If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
           above 1000 will be coerced to 1000.
         :param page_token: str (optional)
@@ -1085,51 +1252,67 @@ def list_directory_contents(self,
           request. To list all of the entries in a directory, it is necessary to continue requesting pages of
           entries until the response contains no `next_page_token`. Note that the number of entries returned
           must not be used to determine when the listing is complete.
-        
+
         :returns: Iterator over :class:`DirectoryEntry`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
             json = self._api.do(
-                'GET',
-                f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}',
+                "GET",
+                f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}",
                 query=query,
-                headers=headers)
-            if 'contents' in json:
-                for v in json['contents']:
+                headers=headers,
+            )
+            if "contents" in json:
+                for v in json["contents"]:
                     yield DirectoryEntry.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None):
+            query["page_token"] = json["next_page_token"]
+
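A sketch of consuming the paginated iterator above; the generator keeps following next_page_token internally, so callers simply iterate. The DirectoryEntry field names used here are assumptions noted in the comments.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Pagination is handled inside the generator: it re-requests with page_token until
    # the response carries no next_page_token.
    for entry in w.files.list_directory_contents("/Volumes/main/default/my_volume"):
        # entry is assumed to expose path, is_directory and file_size via DirectoryEntry.
        kind = "dir " if entry.is_directory else "file"
        print(kind, entry.path, entry.file_size)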
+    def upload(
+        self,
+        file_path: str,
+        contents: BinaryIO,
+        *,
+        overwrite: Optional[bool] = None,
+    ):
         """Upload a file.
-        
+
         Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
         octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
         resulting file will be exactly the bytes sent in the request body. If the request is successful, there
         is no response body.
-        
+
         :param file_path: str
           The absolute path of the file.
         :param contents: BinaryIO
         :param overwrite: bool (optional)
           If true, an existing file will be overwritten.
-        
-        
+
+
         """
 
         query = {}
-        if overwrite is not None: query['overwrite'] = overwrite
-        headers = {'Content-Type': 'application/octet-stream', }
-
-        self._api.do('PUT',
-                     f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
-                     query=query,
-                     headers=headers,
-                     data=contents)
+        if overwrite is not None:
+            query["overwrite"] = overwrite
+        headers = {
+            "Content-Type": "application/octet-stream",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}",
+            query=query,
+            headers=headers,
+            data=contents,
+        )
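A raw-bytes upload sketch for the method above, assuming WorkspaceClient.files as the accessor; the body is sent verbatim as an octet stream, so any binary file-like object works.

    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Contents are sent as-is (no base64 or other encoding); overwrite=True replaces an existing file.
    data = io.BytesIO(b"col_a,col_b\n1,2\n")
    w.files.upload("/Volumes/main/default/my_volume/data.csv", data, overwrite=True)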
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 2f752d06c..6eac0b856 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -31,30 +31,38 @@ class AccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the AccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlRequest:
         """Deserializes the AccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
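To illustrate the as_dict/from_dict pattern that the rest of this file repeats, a round-trip sketch for AccessControlRequest; PermissionLevel.CAN_MANAGE is assumed to be one of the enum's members (the enum is defined outside this hunk).

    from databricks.sdk.service.iam import AccessControlRequest, PermissionLevel

    req = AccessControlRequest(
        user_name="someone@example.com",
        permission_level=PermissionLevel.CAN_MANAGE,  # assumed enum member
    )
    payload = req.as_dict()  # enum members are serialized by value, e.g. "CAN_MANAGE"
    same = AccessControlRequest.from_dict(payload)
    assert same.user_name == req.user_name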
 
 
 @dataclass
@@ -77,58 +85,71 @@ class AccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the AccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse:
         """Deserializes the AccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', Permission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", Permission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
 class Actor:
     """represents an identity trying to access a resource - user or a service principal group can be a
-    principal of a permission set assignment but an actor is always a user or a service principal"""
+    principal of a permission set assignment but an actor is always a user or a service principal
+    """
 
     actor_id: Optional[int] = None
 
     def as_dict(self) -> dict:
         """Serializes the Actor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        if self.actor_id is not None:
+            body["actor_id"] = self.actor_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Actor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        if self.actor_id is not None:
+            body["actor_id"] = self.actor_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Actor:
         """Deserializes the Actor from a dictionary."""
-        return cls(actor_id=d.get('actor_id', None))
+        return cls(actor_id=d.get("actor_id", None))
 
 
 @dataclass
@@ -140,22 +161,28 @@ class CheckPolicyResponse:
     def as_dict(self) -> dict:
         """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict()
-        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        if self.consistency_token:
+            body["consistency_token"] = self.consistency_token.as_dict()
+        if self.is_permitted is not None:
+            body["is_permitted"] = self.is_permitted
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.consistency_token: body['consistency_token'] = self.consistency_token
-        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        if self.consistency_token:
+            body["consistency_token"] = self.consistency_token
+        if self.is_permitted is not None:
+            body["is_permitted"] = self.is_permitted
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CheckPolicyResponse:
         """Deserializes the CheckPolicyResponse from a dictionary."""
-        return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken),
-                   is_permitted=d.get('is_permitted', None))
+        return cls(
+            consistency_token=_from_dict(d, "consistency_token", ConsistencyToken),
+            is_permitted=d.get("is_permitted", None),
+        )
 
 
 @dataclass
@@ -173,31 +200,43 @@ class ComplexValue:
     def as_dict(self) -> dict:
         """Serializes the ComplexValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display is not None: body['display'] = self.display
-        if self.primary is not None: body['primary'] = self.primary
-        if self.ref is not None: body['$ref'] = self.ref
-        if self.type is not None: body['type'] = self.type
-        if self.value is not None: body['value'] = self.value
+        if self.display is not None:
+            body["display"] = self.display
+        if self.primary is not None:
+            body["primary"] = self.primary
+        if self.ref is not None:
+            body["$ref"] = self.ref
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComplexValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display is not None: body['display'] = self.display
-        if self.primary is not None: body['primary'] = self.primary
-        if self.ref is not None: body['$ref'] = self.ref
-        if self.type is not None: body['type'] = self.type
-        if self.value is not None: body['value'] = self.value
+        if self.display is not None:
+            body["display"] = self.display
+        if self.primary is not None:
+            body["primary"] = self.primary
+        if self.ref is not None:
+            body["$ref"] = self.ref
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplexValue:
         """Deserializes the ComplexValue from a dictionary."""
-        return cls(display=d.get('display', None),
-                   primary=d.get('primary', None),
-                   ref=d.get('$ref', None),
-                   type=d.get('type', None),
-                   value=d.get('value', None))
+        return cls(
+            display=d.get("display", None),
+            primary=d.get("primary", None),
+            ref=d.get("$ref", None),
+            type=d.get("type", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -207,19 +246,21 @@ class ConsistencyToken:
     def as_dict(self) -> dict:
         """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConsistencyToken:
         """Deserializes the ConsistencyToken from a dictionary."""
-        return cls(value=d.get('value', None))
+        return cls(value=d.get("value", None))
 
 
 @dataclass
@@ -267,19 +308,21 @@ class GetAssignableRolesForResourceResponse:
     def as_dict(self) -> dict:
         """Serializes the GetAssignableRolesForResourceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
+        if self.roles:
+            body["roles"] = [v.as_dict() for v in self.roles]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.roles: body['roles'] = self.roles
+        if self.roles:
+            body["roles"] = self.roles
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAssignableRolesForResourceResponse:
         """Deserializes the GetAssignableRolesForResourceResponse from a dictionary."""
-        return cls(roles=_repeated_dict(d, 'roles', Role))
+        return cls(roles=_repeated_dict(d, "roles", Role))
 
 
 @dataclass
@@ -290,19 +333,21 @@ class GetPasswordPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetPasswordPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPasswordPermissionLevelsResponse:
         """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PasswordPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", PasswordPermissionsDescription))
 
 
 @dataclass
@@ -313,25 +358,27 @@ class GetPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPermissionLevelsResponse:
         """Deserializes the GetPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", PermissionsDescription))
 
 
 class GetSortOrder(Enum):
 
-    ASCENDING = 'ascending'
-    DESCENDING = 'descending'
+    ASCENDING = "ascending"
+    DESCENDING = "descending"
 
 
 @dataclass
@@ -345,21 +392,25 @@ class GrantRule:
     def as_dict(self) -> dict:
         """Serializes the GrantRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principals: body['principals'] = [v for v in self.principals]
-        if self.role is not None: body['role'] = self.role
+        if self.principals:
+            body["principals"] = [v for v in self.principals]
+        if self.role is not None:
+            body["role"] = self.role
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GrantRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principals: body['principals'] = self.principals
-        if self.role is not None: body['role'] = self.role
+        if self.principals:
+            body["principals"] = self.principals
+        if self.role is not None:
+            body["role"] = self.role
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GrantRule:
         """Deserializes the GrantRule from a dictionary."""
-        return cls(principals=d.get('principals', None), role=d.get('role', None))
+        return cls(principals=d.get("principals", None), role=d.get("role", None))
 
 
 @dataclass
@@ -394,48 +445,68 @@ class Group:
     def as_dict(self) -> dict:
         """Serializes the Group into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements]
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = [v.as_dict() for v in self.groups]
-        if self.id is not None: body['id'] = self.id
-        if self.members: body['members'] = [v.as_dict() for v in self.members]
-        if self.meta: body['meta'] = self.meta.as_dict()
-        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.entitlements:
+            body["entitlements"] = [v.as_dict() for v in self.entitlements]
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = [v.as_dict() for v in self.groups]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.members:
+            body["members"] = [v.as_dict() for v in self.members]
+        if self.meta:
+            body["meta"] = self.meta.as_dict()
+        if self.roles:
+            body["roles"] = [v.as_dict() for v in self.roles]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Group into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.entitlements: body['entitlements'] = self.entitlements
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = self.groups
-        if self.id is not None: body['id'] = self.id
-        if self.members: body['members'] = self.members
-        if self.meta: body['meta'] = self.meta
-        if self.roles: body['roles'] = self.roles
-        if self.schemas: body['schemas'] = self.schemas
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.entitlements:
+            body["entitlements"] = self.entitlements
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = self.groups
+        if self.id is not None:
+            body["id"] = self.id
+        if self.members:
+            body["members"] = self.members
+        if self.meta:
+            body["meta"] = self.meta
+        if self.roles:
+            body["roles"] = self.roles
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Group:
         """Deserializes the Group from a dictionary."""
-        return cls(display_name=d.get('displayName', None),
-                   entitlements=_repeated_dict(d, 'entitlements', ComplexValue),
-                   external_id=d.get('externalId', None),
-                   groups=_repeated_dict(d, 'groups', ComplexValue),
-                   id=d.get('id', None),
-                   members=_repeated_dict(d, 'members', ComplexValue),
-                   meta=_from_dict(d, 'meta', ResourceMeta),
-                   roles=_repeated_dict(d, 'roles', ComplexValue),
-                   schemas=_repeated_enum(d, 'schemas', GroupSchema))
+        return cls(
+            display_name=d.get("displayName", None),
+            entitlements=_repeated_dict(d, "entitlements", ComplexValue),
+            external_id=d.get("externalId", None),
+            groups=_repeated_dict(d, "groups", ComplexValue),
+            id=d.get("id", None),
+            members=_repeated_dict(d, "members", ComplexValue),
+            meta=_from_dict(d, "meta", ResourceMeta),
+            roles=_repeated_dict(d, "roles", ComplexValue),
+            schemas=_repeated_enum(d, "schemas", GroupSchema),
+        )
 
 
 class GroupSchema(Enum):
 
-    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = 'urn:ietf:params:scim:schemas:core:2.0:Group'
+    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = "urn:ietf:params:scim:schemas:core:2.0:Group"
 
 
 @dataclass
@@ -458,36 +529,48 @@ class ListGroupsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = [v.as_dict() for v in self.resources]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = self.resources
-        if self.schemas: body['schemas'] = self.schemas
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = self.resources
+        if self.schemas:
+            body["schemas"] = self.schemas
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGroupsResponse:
         """Deserializes the ListGroupsResponse from a dictionary."""
-        return cls(items_per_page=d.get('itemsPerPage', None),
-                   resources=_repeated_dict(d, 'Resources', Group),
-                   schemas=_repeated_enum(d, 'schemas', ListResponseSchema),
-                   start_index=d.get('startIndex', None),
-                   total_results=d.get('totalResults', None))
+        return cls(
+            items_per_page=d.get("itemsPerPage", None),
+            resources=_repeated_dict(d, "Resources", Group),
+            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
+            start_index=d.get("startIndex", None),
+            total_results=d.get("totalResults", None),
+        )
 
 
 class ListResponseSchema(Enum):
 
-    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = 'urn:ietf:params:scim:api:messages:2.0:ListResponse'
+    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse"
 
 
 @dataclass
@@ -510,37 +593,49 @@ class ListServicePrincipalResponse:
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = [v.as_dict() for v in self.resources]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = self.resources
-        if self.schemas: body['schemas'] = self.schemas
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = self.resources
+        if self.schemas:
+            body["schemas"] = self.schemas
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalResponse:
         """Deserializes the ListServicePrincipalResponse from a dictionary."""
-        return cls(items_per_page=d.get('itemsPerPage', None),
-                   resources=_repeated_dict(d, 'Resources', ServicePrincipal),
-                   schemas=_repeated_enum(d, 'schemas', ListResponseSchema),
-                   start_index=d.get('startIndex', None),
-                   total_results=d.get('totalResults', None))
+        return cls(
+            items_per_page=d.get("itemsPerPage", None),
+            resources=_repeated_dict(d, "Resources", ServicePrincipal),
+            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
+            start_index=d.get("startIndex", None),
+            total_results=d.get("totalResults", None),
+        )
 
 
 class ListSortOrder(Enum):
 
-    ASCENDING = 'ascending'
-    DESCENDING = 'descending'
+    ASCENDING = "ascending"
+    DESCENDING = "descending"
 
 
 @dataclass
@@ -563,31 +658,43 @@ class ListUsersResponse:
     def as_dict(self) -> dict:
         """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = [v.as_dict() for v in self.resources]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
-        if self.resources: body['Resources'] = self.resources
-        if self.schemas: body['schemas'] = self.schemas
-        if self.start_index is not None: body['startIndex'] = self.start_index
-        if self.total_results is not None: body['totalResults'] = self.total_results
+        if self.items_per_page is not None:
+            body["itemsPerPage"] = self.items_per_page
+        if self.resources:
+            body["Resources"] = self.resources
+        if self.schemas:
+            body["schemas"] = self.schemas
+        if self.start_index is not None:
+            body["startIndex"] = self.start_index
+        if self.total_results is not None:
+            body["totalResults"] = self.total_results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse:
         """Deserializes the ListUsersResponse from a dictionary."""
-        return cls(items_per_page=d.get('itemsPerPage', None),
-                   resources=_repeated_dict(d, 'Resources', User),
-                   schemas=_repeated_enum(d, 'schemas', ListResponseSchema),
-                   start_index=d.get('startIndex', None),
-                   total_results=d.get('totalResults', None))
+        return cls(
+            items_per_page=d.get("itemsPerPage", None),
+            resources=_repeated_dict(d, "Resources", User),
+            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
+            start_index=d.get("startIndex", None),
+            total_results=d.get("totalResults", None),
+        )
 
 
 @dataclass
@@ -608,29 +715,37 @@ def as_dict(self) -> dict:
         """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.from_workspace_group_name is not None:
-            body['from_workspace_group_name'] = self.from_workspace_group_name
-        if self.size is not None: body['size'] = self.size
-        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+            body["from_workspace_group_name"] = self.from_workspace_group_name
+        if self.size is not None:
+            body["size"] = self.size
+        if self.to_account_group_name is not None:
+            body["to_account_group_name"] = self.to_account_group_name
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.from_workspace_group_name is not None:
-            body['from_workspace_group_name'] = self.from_workspace_group_name
-        if self.size is not None: body['size'] = self.size
-        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+            body["from_workspace_group_name"] = self.from_workspace_group_name
+        if self.size is not None:
+            body["size"] = self.size
+        if self.to_account_group_name is not None:
+            body["to_account_group_name"] = self.to_account_group_name
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest:
         """Deserializes the MigratePermissionsRequest from a dictionary."""
-        return cls(from_workspace_group_name=d.get('from_workspace_group_name', None),
-                   size=d.get('size', None),
-                   to_account_group_name=d.get('to_account_group_name', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            from_workspace_group_name=d.get("from_workspace_group_name", None),
+            size=d.get("size", None),
+            to_account_group_name=d.get("to_account_group_name", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -641,19 +756,21 @@ class MigratePermissionsResponse:
     def as_dict(self) -> dict:
         """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
+        if self.permissions_migrated is not None:
+            body["permissions_migrated"] = self.permissions_migrated
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
+        if self.permissions_migrated is not None:
+            body["permissions_migrated"] = self.permissions_migrated
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse:
         """Deserializes the MigratePermissionsResponse from a dictionary."""
-        return cls(permissions_migrated=d.get('permissions_migrated', None))
+        return cls(permissions_migrated=d.get("permissions_migrated", None))
 
 
 @dataclass
@@ -667,21 +784,28 @@ class Name:
     def as_dict(self) -> dict:
         """Serializes the Name into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.family_name is not None: body['familyName'] = self.family_name
-        if self.given_name is not None: body['givenName'] = self.given_name
+        if self.family_name is not None:
+            body["familyName"] = self.family_name
+        if self.given_name is not None:
+            body["givenName"] = self.given_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Name into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.family_name is not None: body['familyName'] = self.family_name
-        if self.given_name is not None: body['givenName'] = self.given_name
+        if self.family_name is not None:
+            body["familyName"] = self.family_name
+        if self.given_name is not None:
+            body["givenName"] = self.given_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Name:
         """Deserializes the Name from a dictionary."""
-        return cls(family_name=d.get('familyName', None), given_name=d.get('givenName', None))
+        return cls(
+            family_name=d.get("familyName", None),
+            given_name=d.get("givenName", None),
+        )
 
 
 @dataclass
@@ -696,25 +820,32 @@ def as_dict(self) -> dict:
         """Serializes the ObjectPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ObjectPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectPermissions:
         """Deserializes the ObjectPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -730,25 +861,33 @@ class PartialUpdate:
     def as_dict(self) -> dict:
         """Serializes the PartialUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.operations: body['Operations'] = [v.as_dict() for v in self.operations]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.operations:
+            body["Operations"] = [v.as_dict() for v in self.operations]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.operations: body['Operations'] = self.operations
-        if self.schemas: body['schemas'] = self.schemas
+        if self.id is not None:
+            body["id"] = self.id
+        if self.operations:
+            body["Operations"] = self.operations
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartialUpdate:
         """Deserializes the PartialUpdate from a dictionary."""
-        return cls(id=d.get('id', None),
-                   operations=_repeated_dict(d, 'Operations', Patch),
-                   schemas=_repeated_enum(d, 'schemas', PatchSchema))
+        return cls(
+            id=d.get("id", None),
+            operations=_repeated_dict(d, "Operations", Patch),
+            schemas=_repeated_enum(d, "schemas", PatchSchema),
+        )
 
 
 @dataclass
@@ -768,30 +907,38 @@ class PasswordAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the PasswordAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlRequest:
         """Deserializes the PasswordAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', PasswordPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -814,33 +961,43 @@ class PasswordAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the PasswordAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlResponse:
         """Deserializes the PasswordAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', PasswordPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", PasswordPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -855,31 +1012,39 @@ class PasswordPermission:
     def as_dict(self) -> dict:
         """Serializes the PasswordPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermission:
         """Deserializes the PasswordPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', PasswordPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
+        )
 
 
 class PasswordPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_USE = 'CAN_USE'
+    CAN_USE = "CAN_USE"
 
 
 @dataclass
@@ -894,26 +1059,32 @@ def as_dict(self) -> dict:
         """Serializes the PasswordPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissions:
         """Deserializes the PasswordPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      PasswordAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -926,22 +1097,28 @@ class PasswordPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the PasswordPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsDescription:
         """Deserializes the PasswordPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', PasswordPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
+        )
 
 
 @dataclass
@@ -952,19 +1129,20 @@ def as_dict(self) -> dict:
         """Serializes the PasswordPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsRequest:
         """Deserializes the PasswordPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PasswordAccessControlRequest))
+        return cls(access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlRequest))
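
The serialization helpers above deliberately skip unset fields, so a request body only carries what the caller populated. A minimal sketch of that behavior, assuming these classes live in databricks.sdk.service.iam (the module this hunk patches):

from databricks.sdk.service.iam import (
    PasswordAccessControlRequest,
    PasswordPermissionLevel,
    PasswordPermissionsRequest,
)

# Only group_name and permission_level are set; the remaining attributes stay
# None and are therefore omitted by the as_dict() checks shown above.
req = PasswordPermissionsRequest(
    access_control_list=[
        PasswordAccessControlRequest(
            group_name="admins",
            permission_level=PasswordPermissionLevel.CAN_USE,
        )
    ]
)
print(req.as_dict())
# {'access_control_list': [{'group_name': 'admins', 'permission_level': 'CAN_USE'}]}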
 
 
 @dataclass
@@ -981,31 +1159,41 @@ class Patch:
     def as_dict(self) -> dict:
         """Serializes the Patch into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.op is not None: body['op'] = self.op.value
-        if self.path is not None: body['path'] = self.path
-        if self.value: body['value'] = self.value
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.path is not None:
+            body["path"] = self.path
+        if self.value:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Patch into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.op is not None: body['op'] = self.op
-        if self.path is not None: body['path'] = self.path
-        if self.value: body['value'] = self.value
+        if self.op is not None:
+            body["op"] = self.op
+        if self.path is not None:
+            body["path"] = self.path
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Patch:
         """Deserializes the Patch from a dictionary."""
-        return cls(op=_enum(d, 'op', PatchOp), path=d.get('path', None), value=d.get('value', None))
+        return cls(
+            op=_enum(d, "op", PatchOp),
+            path=d.get("path", None),
+            value=d.get("value", None),
+        )
 
 
 class PatchOp(Enum):
     """Type of patch operation."""
 
-    ADD = 'add'
-    REMOVE = 'remove'
-    REPLACE = 'replace'
+    ADD = "add"
+    REMOVE = "remove"
+    REPLACE = "replace"
 
 
 @dataclass
@@ -1029,7 +1217,7 @@ def from_dict(cls, d: Dict[str, any]) -> PatchResponse:
 
 class PatchSchema(Enum):
 
-    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = 'urn:ietf:params:scim:api:messages:2.0:PatchOp'
+    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp"
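
PartialUpdate, Patch, PatchOp and PatchSchema together describe a SCIM 2.0 PATCH request. A hedged round-trip sketch, again assuming the databricks.sdk.service.iam module path:

from databricks.sdk.service.iam import PartialUpdate, Patch, PatchOp, PatchSchema

update = PartialUpdate(
    id="123",
    operations=[Patch(op=PatchOp.REPLACE, path="active", value="false")],
    schemas=[PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)

body = update.as_dict()
# Note the generated key casing: operations serialize under "Operations".
# {'id': '123',
#  'Operations': [{'op': 'replace', 'path': 'active', 'value': 'false'}],
#  'schemas': ['urn:ietf:params:scim:api:messages:2.0:PatchOp']}

roundtrip = PartialUpdate.from_dict(body)
assert roundtrip.operations[0].op is PatchOp.REPLACE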
 
 
 @dataclass
@@ -1044,25 +1232,33 @@ class Permission:
     def as_dict(self) -> dict:
         """Serializes the Permission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Permission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Permission:
         """Deserializes the Permission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+        )
 
 
 @dataclass
@@ -1082,25 +1278,33 @@ class PermissionAssignment:
     def as_dict(self) -> dict:
         """Serializes the PermissionAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error is not None: body['error'] = self.error
-        if self.permissions: body['permissions'] = [v.value for v in self.permissions]
-        if self.principal: body['principal'] = self.principal.as_dict()
+        if self.error is not None:
+            body["error"] = self.error
+        if self.permissions:
+            body["permissions"] = [v.value for v in self.permissions]
+        if self.principal:
+            body["principal"] = self.principal.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error is not None: body['error'] = self.error
-        if self.permissions: body['permissions'] = self.permissions
-        if self.principal: body['principal'] = self.principal
+        if self.error is not None:
+            body["error"] = self.error
+        if self.permissions:
+            body["permissions"] = self.permissions
+        if self.principal:
+            body["principal"] = self.principal
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignment:
         """Deserializes the PermissionAssignment from a dictionary."""
-        return cls(error=d.get('error', None),
-                   permissions=_repeated_enum(d, 'permissions', WorkspacePermission),
-                   principal=_from_dict(d, 'principal', PrincipalOutput))
+        return cls(
+            error=d.get("error", None),
+            permissions=_repeated_enum(d, "permissions", WorkspacePermission),
+            principal=_from_dict(d, "principal", PrincipalOutput),
+        )
 
 
 @dataclass
@@ -1112,41 +1316,42 @@ def as_dict(self) -> dict:
         """Serializes the PermissionAssignments into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.permission_assignments:
-            body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments]
+            body["permission_assignments"] = [v.as_dict() for v in self.permission_assignments]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_assignments: body['permission_assignments'] = self.permission_assignments
+        if self.permission_assignments:
+            body["permission_assignments"] = self.permission_assignments
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignments:
         """Deserializes the PermissionAssignments from a dictionary."""
-        return cls(permission_assignments=_repeated_dict(d, 'permission_assignments', PermissionAssignment))
+        return cls(permission_assignments=_repeated_dict(d, "permission_assignments", PermissionAssignment))
 
 
 class PermissionLevel(Enum):
     """Permission level"""
 
-    CAN_ATTACH_TO = 'CAN_ATTACH_TO'
-    CAN_BIND = 'CAN_BIND'
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_EDIT_METADATA = 'CAN_EDIT_METADATA'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
-    CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
-    CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
-    CAN_MONITOR = 'CAN_MONITOR'
-    CAN_QUERY = 'CAN_QUERY'
-    CAN_READ = 'CAN_READ'
-    CAN_RESTART = 'CAN_RESTART'
-    CAN_RUN = 'CAN_RUN'
-    CAN_USE = 'CAN_USE'
-    CAN_VIEW = 'CAN_VIEW'
-    CAN_VIEW_METADATA = 'CAN_VIEW_METADATA'
-    IS_OWNER = 'IS_OWNER'
+    CAN_ATTACH_TO = "CAN_ATTACH_TO"
+    CAN_BIND = "CAN_BIND"
+    CAN_EDIT = "CAN_EDIT"
+    CAN_EDIT_METADATA = "CAN_EDIT_METADATA"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS"
+    CAN_MANAGE_RUN = "CAN_MANAGE_RUN"
+    CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS"
+    CAN_MONITOR = "CAN_MONITOR"
+    CAN_QUERY = "CAN_QUERY"
+    CAN_READ = "CAN_READ"
+    CAN_RESTART = "CAN_RESTART"
+    CAN_RUN = "CAN_RUN"
+    CAN_USE = "CAN_USE"
+    CAN_VIEW = "CAN_VIEW"
+    CAN_VIEW_METADATA = "CAN_VIEW_METADATA"
+    IS_OWNER = "IS_OWNER"
 
 
 @dataclass
@@ -1159,22 +1364,28 @@ class PermissionOutput:
     def as_dict(self) -> dict:
         """Serializes the PermissionOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionOutput:
         """Deserializes the PermissionOutput from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', WorkspacePermission))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", WorkspacePermission),
+        )
 
 
 @dataclass
@@ -1187,22 +1398,28 @@ class PermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the PermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsDescription:
         """Deserializes the PermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+        )
 
 
 @dataclass
@@ -1221,25 +1438,32 @@ def as_dict(self) -> dict:
         """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.request_object_id is not None: body['request_object_id'] = self.request_object_id
-        if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.request_object_id is not None:
+            body["request_object_id"] = self.request_object_id
+        if self.request_object_type is not None:
+            body["request_object_type"] = self.request_object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.request_object_id is not None: body['request_object_id'] = self.request_object_id
-        if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.request_object_id is not None:
+            body["request_object_id"] = self.request_object_id
+        if self.request_object_type is not None:
+            body["request_object_type"] = self.request_object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest:
         """Deserializes the PermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlRequest),
-                   request_object_id=d.get('request_object_id', None),
-                   request_object_type=d.get('request_object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest),
+            request_object_id=d.get("request_object_id", None),
+            request_object_type=d.get("request_object_type", None),
+        )
 
 
 @dataclass
@@ -1264,41 +1488,51 @@ class PrincipalOutput:
     def as_dict(self) -> dict:
         """Serializes the PrincipalOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput:
         """Deserializes the PrincipalOutput from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   principal_id=d.get('principal_id', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            principal_id=d.get("principal_id", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 class RequestAuthzIdentity(Enum):
     """Defines the identity to be used for authZ of the request on the server side. See one pager for
     for more information: http://go/acl/service-identity"""
 
-    REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY'
-    REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT'
+    REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY"
+    REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT"
 
 
 @dataclass
@@ -1315,25 +1549,33 @@ class ResourceInfo:
     def as_dict(self) -> dict:
         """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
-        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.legacy_acl_path is not None:
+            body["legacy_acl_path"] = self.legacy_acl_path
+        if self.parent_resource_info:
+            body["parent_resource_info"] = self.parent_resource_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
-        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info
+        if self.id is not None:
+            body["id"] = self.id
+        if self.legacy_acl_path is not None:
+            body["legacy_acl_path"] = self.legacy_acl_path
+        if self.parent_resource_info:
+            body["parent_resource_info"] = self.parent_resource_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResourceInfo:
         """Deserializes the ResourceInfo from a dictionary."""
-        return cls(id=d.get('id', None),
-                   legacy_acl_path=d.get('legacy_acl_path', None),
-                   parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo))
+        return cls(
+            id=d.get("id", None),
+            legacy_acl_path=d.get("legacy_acl_path", None),
+            parent_resource_info=_from_dict(d, "parent_resource_info", ResourceInfo),
+        )
 
 
 @dataclass
@@ -1345,19 +1587,21 @@ class ResourceMeta:
     def as_dict(self) -> dict:
         """Serializes the ResourceMeta into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.resource_type is not None: body['resourceType'] = self.resource_type
+        if self.resource_type is not None:
+            body["resourceType"] = self.resource_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.resource_type is not None: body['resourceType'] = self.resource_type
+        if self.resource_type is not None:
+            body["resourceType"] = self.resource_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResourceMeta:
         """Deserializes the ResourceMeta from a dictionary."""
-        return cls(resource_type=d.get('resourceType', None))
+        return cls(resource_type=d.get("resourceType", None))
 
 
 @dataclass
@@ -1368,19 +1612,21 @@ class Role:
     def as_dict(self) -> dict:
         """Serializes the Role into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Role into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Role:
         """Deserializes the Role from a dictionary."""
-        return cls(name=d.get('name', None))
+        return cls(name=d.get("name", None))
 
 
 @dataclass
@@ -1396,25 +1642,33 @@ class RuleSetResponse:
     def as_dict(self) -> dict:
         """Serializes the RuleSetResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules]
-        if self.name is not None: body['name'] = self.name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.grant_rules:
+            body["grant_rules"] = [v.as_dict() for v in self.grant_rules]
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.grant_rules: body['grant_rules'] = self.grant_rules
-        if self.name is not None: body['name'] = self.name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.grant_rules:
+            body["grant_rules"] = self.grant_rules
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetResponse:
         """Deserializes the RuleSetResponse from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   grant_rules=_repeated_dict(d, 'grant_rules', GrantRule),
-                   name=d.get('name', None))
+        return cls(
+            etag=d.get("etag", None),
+            grant_rules=_repeated_dict(d, "grant_rules", GrantRule),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1431,25 +1685,33 @@ class RuleSetUpdateRequest:
     def as_dict(self) -> dict:
         """Serializes the RuleSetUpdateRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules]
-        if self.name is not None: body['name'] = self.name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.grant_rules:
+            body["grant_rules"] = [v.as_dict() for v in self.grant_rules]
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.grant_rules: body['grant_rules'] = self.grant_rules
-        if self.name is not None: body['name'] = self.name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.grant_rules:
+            body["grant_rules"] = self.grant_rules
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetUpdateRequest:
         """Deserializes the RuleSetUpdateRequest from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   grant_rules=_repeated_dict(d, 'grant_rules', GrantRule),
-                   name=d.get('name', None))
+        return cls(
+            etag=d.get("etag", None),
+            grant_rules=_repeated_dict(d, "grant_rules", GrantRule),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1485,48 +1747,68 @@ class ServicePrincipal:
     def as_dict(self) -> dict:
         """Serializes the ServicePrincipal into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.active is not None: body['active'] = self.active
-        if self.application_id is not None: body['applicationId'] = self.application_id
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements]
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = [v.as_dict() for v in self.groups]
-        if self.id is not None: body['id'] = self.id
-        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.active is not None:
+            body["active"] = self.active
+        if self.application_id is not None:
+            body["applicationId"] = self.application_id
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.entitlements:
+            body["entitlements"] = [v.as_dict() for v in self.entitlements]
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = [v.as_dict() for v in self.groups]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.roles:
+            body["roles"] = [v.as_dict() for v in self.roles]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.active is not None: body['active'] = self.active
-        if self.application_id is not None: body['applicationId'] = self.application_id
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.entitlements: body['entitlements'] = self.entitlements
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = self.groups
-        if self.id is not None: body['id'] = self.id
-        if self.roles: body['roles'] = self.roles
-        if self.schemas: body['schemas'] = self.schemas
+        if self.active is not None:
+            body["active"] = self.active
+        if self.application_id is not None:
+            body["applicationId"] = self.application_id
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.entitlements:
+            body["entitlements"] = self.entitlements
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = self.groups
+        if self.id is not None:
+            body["id"] = self.id
+        if self.roles:
+            body["roles"] = self.roles
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServicePrincipal:
         """Deserializes the ServicePrincipal from a dictionary."""
-        return cls(active=d.get('active', None),
-                   application_id=d.get('applicationId', None),
-                   display_name=d.get('displayName', None),
-                   entitlements=_repeated_dict(d, 'entitlements', ComplexValue),
-                   external_id=d.get('externalId', None),
-                   groups=_repeated_dict(d, 'groups', ComplexValue),
-                   id=d.get('id', None),
-                   roles=_repeated_dict(d, 'roles', ComplexValue),
-                   schemas=_repeated_enum(d, 'schemas', ServicePrincipalSchema))
+        return cls(
+            active=d.get("active", None),
+            application_id=d.get("applicationId", None),
+            display_name=d.get("displayName", None),
+            entitlements=_repeated_dict(d, "entitlements", ComplexValue),
+            external_id=d.get("externalId", None),
+            groups=_repeated_dict(d, "groups", ComplexValue),
+            id=d.get("id", None),
+            roles=_repeated_dict(d, "roles", ComplexValue),
+            schemas=_repeated_enum(d, "schemas", ServicePrincipalSchema),
+        )
 
 
 class ServicePrincipalSchema(Enum):
 
-    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = 'urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal'
+    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal"
 
 
 @dataclass
@@ -1558,21 +1840,28 @@ class UpdateRuleSetRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateRuleSetRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.rule_set: body['rule_set'] = self.rule_set.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rule_set:
+            body["rule_set"] = self.rule_set.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.rule_set: body['rule_set'] = self.rule_set
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rule_set:
+            body["rule_set"] = self.rule_set
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest:
         """Deserializes the UpdateRuleSetRequest from a dictionary."""
-        return cls(name=d.get('name', None), rule_set=_from_dict(d, 'rule_set', RuleSetUpdateRequest))
+        return cls(
+            name=d.get("name", None),
+            rule_set=_from_dict(d, "rule_set", RuleSetUpdateRequest),
+        )
 
 
 @dataclass
@@ -1593,25 +1882,33 @@ class UpdateWorkspaceAssignments:
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permissions: body['permissions'] = [v.value for v in self.permissions]
-        if self.principal_id is not None: body['principal_id'] = self.principal_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.permissions:
+            body["permissions"] = [v.value for v in self.permissions]
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permissions: body['permissions'] = self.permissions
-        if self.principal_id is not None: body['principal_id'] = self.principal_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.permissions:
+            body["permissions"] = self.permissions
+        if self.principal_id is not None:
+            body["principal_id"] = self.principal_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceAssignments:
         """Deserializes the UpdateWorkspaceAssignments from a dictionary."""
-        return cls(permissions=_repeated_enum(d, 'permissions', WorkspacePermission),
-                   principal_id=d.get('principal_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            permissions=_repeated_enum(d, "permissions", WorkspacePermission),
+            principal_id=d.get("principal_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -1658,62 +1955,88 @@ class User:
     def as_dict(self) -> dict:
         """Serializes the User into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.active is not None: body['active'] = self.active
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.emails: body['emails'] = [v.as_dict() for v in self.emails]
-        if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements]
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = [v.as_dict() for v in self.groups]
-        if self.id is not None: body['id'] = self.id
-        if self.name: body['name'] = self.name.as_dict()
-        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
-        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
-        if self.user_name is not None: body['userName'] = self.user_name
+        if self.active is not None:
+            body["active"] = self.active
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.emails:
+            body["emails"] = [v.as_dict() for v in self.emails]
+        if self.entitlements:
+            body["entitlements"] = [v.as_dict() for v in self.entitlements]
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = [v.as_dict() for v in self.groups]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name:
+            body["name"] = self.name.as_dict()
+        if self.roles:
+            body["roles"] = [v.as_dict() for v in self.roles]
+        if self.schemas:
+            body["schemas"] = [v.value for v in self.schemas]
+        if self.user_name is not None:
+            body["userName"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the User into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.active is not None: body['active'] = self.active
-        if self.display_name is not None: body['displayName'] = self.display_name
-        if self.emails: body['emails'] = self.emails
-        if self.entitlements: body['entitlements'] = self.entitlements
-        if self.external_id is not None: body['externalId'] = self.external_id
-        if self.groups: body['groups'] = self.groups
-        if self.id is not None: body['id'] = self.id
-        if self.name: body['name'] = self.name
-        if self.roles: body['roles'] = self.roles
-        if self.schemas: body['schemas'] = self.schemas
-        if self.user_name is not None: body['userName'] = self.user_name
+        if self.active is not None:
+            body["active"] = self.active
+        if self.display_name is not None:
+            body["displayName"] = self.display_name
+        if self.emails:
+            body["emails"] = self.emails
+        if self.entitlements:
+            body["entitlements"] = self.entitlements
+        if self.external_id is not None:
+            body["externalId"] = self.external_id
+        if self.groups:
+            body["groups"] = self.groups
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name:
+            body["name"] = self.name
+        if self.roles:
+            body["roles"] = self.roles
+        if self.schemas:
+            body["schemas"] = self.schemas
+        if self.user_name is not None:
+            body["userName"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
-        return cls(active=d.get('active', None),
-                   display_name=d.get('displayName', None),
-                   emails=_repeated_dict(d, 'emails', ComplexValue),
-                   entitlements=_repeated_dict(d, 'entitlements', ComplexValue),
-                   external_id=d.get('externalId', None),
-                   groups=_repeated_dict(d, 'groups', ComplexValue),
-                   id=d.get('id', None),
-                   name=_from_dict(d, 'name', Name),
-                   roles=_repeated_dict(d, 'roles', ComplexValue),
-                   schemas=_repeated_enum(d, 'schemas', UserSchema),
-                   user_name=d.get('userName', None))
+        return cls(
+            active=d.get("active", None),
+            display_name=d.get("displayName", None),
+            emails=_repeated_dict(d, "emails", ComplexValue),
+            entitlements=_repeated_dict(d, "entitlements", ComplexValue),
+            external_id=d.get("externalId", None),
+            groups=_repeated_dict(d, "groups", ComplexValue),
+            id=d.get("id", None),
+            name=_from_dict(d, "name", Name),
+            roles=_repeated_dict(d, "roles", ComplexValue),
+            schemas=_repeated_enum(d, "schemas", UserSchema),
+            user_name=d.get("userName", None),
+        )
 
 
 class UserSchema(Enum):
 
-    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = 'urn:ietf:params:scim:schemas:core:2.0:User'
-    URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = 'urn:ietf:params:scim:schemas:extension:workspace:2.0:User'
+    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = "urn:ietf:params:scim:schemas:core:2.0:User"
+    URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = (
+        "urn:ietf:params:scim:schemas:extension:workspace:2.0:User"
+    )
 
 
 class WorkspacePermission(Enum):
 
-    ADMIN = 'ADMIN'
-    UNKNOWN = 'UNKNOWN'
-    USER = 'USER'
+    ADMIN = "ADMIN"
+    UNKNOWN = "UNKNOWN"
+    USER = "USER"
 
 
 @dataclass
@@ -1724,19 +2047,21 @@ class WorkspacePermissions:
     def as_dict(self) -> dict:
         """Serializes the WorkspacePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions]
+        if self.permissions:
+            body["permissions"] = [v.as_dict() for v in self.permissions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permissions: body['permissions'] = self.permissions
+        if self.permissions:
+            body["permissions"] = self.permissions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspacePermissions:
         """Deserializes the WorkspacePermissions from a dictionary."""
-        return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput))
+        return cls(permissions=_repeated_dict(d, "permissions", PermissionOutput))
 
 
 class AccessControlAPI:
@@ -1745,16 +2070,18 @@ class AccessControlAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def check_policy(self,
-                     actor: Actor,
-                     permission: str,
-                     resource: str,
-                     consistency_token: ConsistencyToken,
-                     authz_identity: RequestAuthzIdentity,
-                     *,
-                     resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse:
+    def check_policy(
+        self,
+        actor: Actor,
+        permission: str,
+        resource: str,
+        consistency_token: ConsistencyToken,
+        authz_identity: RequestAuthzIdentity,
+        *,
+        resource_info: Optional[ResourceInfo] = None,
+    ) -> CheckPolicyResponse:
         """Check access policy to a resource.
-        
+
         :param actor: :class:`Actor`
         :param permission: str
         :param resource: str
@@ -1763,20 +2090,33 @@ def check_policy(self,
         :param consistency_token: :class:`ConsistencyToken`
         :param authz_identity: :class:`RequestAuthzIdentity`
         :param resource_info: :class:`ResourceInfo` (optional)
-        
+
         :returns: :class:`CheckPolicyResponse`
         """
 
         query = {}
-        if actor is not None: query['actor'] = actor.as_dict()
-        if authz_identity is not None: query['authz_identity'] = authz_identity.value
-        if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict()
-        if permission is not None: query['permission'] = permission
-        if resource is not None: query['resource'] = resource
-        if resource_info is not None: query['resource_info'] = resource_info.as_dict()
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/access-control/check-policy-v2', query=query, headers=headers)
+        if actor is not None:
+            query["actor"] = actor.as_dict()
+        if authz_identity is not None:
+            query["authz_identity"] = authz_identity.value
+        if consistency_token is not None:
+            query["consistency_token"] = consistency_token.as_dict()
+        if permission is not None:
+            query["permission"] = permission
+        if resource is not None:
+            query["resource"] = resource
+        if resource_info is not None:
+            query["resource_info"] = resource_info.as_dict()
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/access-control/check-policy-v2",
+            query=query,
+            headers=headers,
+        )
         return CheckPolicyResponse.from_dict(res)
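
A hedged sketch of calling check_policy. Only the method signature comes from the code above; the api_client property, the Actor and ConsistencyToken field names (both classes are defined earlier in this file, outside this hunk), and the is_permitted attribute on the response are assumptions:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

# Build the API wrapper directly from the client's ApiClient and check whether
# the actor holds CAN_VIEW on an (illustrative) resource name.
access_control = iam.AccessControlAPI(w.api_client)
resp = access_control.check_policy(
    actor=iam.Actor(actor_id=1234),
    permission="CAN_VIEW",
    resource="/some/resource/name",
    consistency_token=iam.ConsistencyToken(value="..."),
    authz_identity=iam.RequestAuthzIdentity.REQUEST_AUTHZ_IDENTITY_USER_CONTEXT,
)
print(resp.is_permitted)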
 
 
@@ -1790,33 +2130,37 @@ def __init__(self, api_client):
 
     def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse:
         """Get assignable roles for a resource.
-        
+
         Gets all the roles that can be granted on an account level resource. A role is grantable if the rule
         set on the resource can contain an access rule of the role.
-        
+
         :param resource: str
           The resource name for which assignable roles will be listed.
-        
+
         :returns: :class:`GetAssignableRolesForResourceResponse`
         """
 
         query = {}
-        if resource is not None: query['resource'] = resource
-        headers = {'Accept': 'application/json', }
+        if resource is not None:
+            query["resource"] = resource
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles',
+            "GET",
+            f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return GetAssignableRolesForResourceResponse.from_dict(res)
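
A brief hedged sketch of listing assignable roles at the account level; the a.access_control accessor, the accounts/<account-id> resource-name format, and the roles attribute on the response are assumptions:

from databricks.sdk import AccountClient

a = AccountClient()

# Query which roles can be granted on the account resource itself.
resp = a.access_control.get_assignable_roles_for_resource(
    resource=f"accounts/{a.config.account_id}"
)
for role in resp.roles or []:
    print(role.name)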
 
     def get_rule_set(self, name: str, etag: str) -> RuleSetResponse:
         """Get a rule set.
-        
+
         Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
         rules on the said resource. Currently only a default rule set for each resource is supported.
-        
+
         :param name: str
           The ruleset name associated with the request.
         :param etag: str
@@ -1826,81 +2170,100 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse:
           modify -> write pattern to perform rule set updates in order to avoid race conditions, that is, get an
           etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
           version you are updating.
-        
+
         :returns: :class:`RuleSetResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        if name is not None: query['name'] = name
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets',
-                           query=query,
-                           headers=headers)
+        if etag is not None:
+            query["etag"] = etag
+        if name is not None:
+            query["name"] = name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets",
+            query=query,
+            headers=headers,
+        )
         return RuleSetResponse.from_dict(res)
 
     def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse:
         """Update a rule set.
-        
+
         Replace the rules of a rule set. First, use get to read the current version of the rule set before
         modifying it. This pattern helps prevent conflicts between concurrent updates.
-        
+
         :param name: str
           Name of the rule set.
         :param rule_set: :class:`RuleSetUpdateRequest`
-        
+
         :returns: :class:`RuleSetResponse`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if rule_set is not None: body['rule_set'] = rule_set.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if name is not None:
+            body["name"] = name
+        if rule_set is not None:
+            body["rule_set"] = rule_set.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets",
+            body=body,
+            headers=headers,
+        )
         return RuleSetResponse.from_dict(res)
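
For readers following the etag contract described in the docstrings above, here is a minimal usage sketch (not part of this patch). It assumes the AccountClient entry point exposes this service as `access_control`, that `RuleSetUpdateRequest` is the dataclass defined earlier in this module, and that the account, group, and resource identifiers are hypothetical placeholders.

from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()  # resolves account host and credentials from the environment

# Read the current rule set (an empty etag returns the latest version) ...
resource = "accounts/<account-id>/groups/<group-id>"  # hypothetical resource name
current = a.access_control.get_rule_set(name=f"{resource}/ruleSets/default", etag="")

# ... then write it back carrying that etag, so a concurrent update fails instead
# of being silently overwritten (the read -> modify -> write pattern).
update = iam.RuleSetUpdateRequest(
    name=current.name,
    etag=current.etag,
    grant_rules=current.grant_rules or [],
)
a.access_control.update_rule_set(name=update.name, rule_set=update)
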
 
 
 class AccountAccessControlProxyAPI:
     """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
     grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
-    called a rule set. A workspace must belong to an account for these APIs to work."""
+    called a rule set. A workspace must belong to an account for these APIs to work.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse:
         """Get assignable roles for a resource.
-        
+
         Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule
         set on the resource can contain an access rule of the role.
-        
+
         :param resource: str
           The resource name for which assignable roles will be listed.
-        
+
         :returns: :class:`GetAssignableRolesForResourceResponse`
         """
 
         query = {}
-        if resource is not None: query['resource'] = resource
-        headers = {'Accept': 'application/json', }
+        if resource is not None:
+            query["resource"] = resource
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/preview/accounts/access-control/assignable-roles',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/preview/accounts/access-control/assignable-roles",
+            query=query,
+            headers=headers,
+        )
         return GetAssignableRolesForResourceResponse.from_dict(res)
 
     def get_rule_set(self, name: str, etag: str) -> RuleSetResponse:
         """Get a rule set.
-        
+
         Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
         rules on the said resource. Currently only a default rule set for each resource is supported.
-        
+
         :param name: str
           The ruleset name associated with the request.
         :param etag: str
@@ -1910,49 +2273,62 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse:
           modify -> write pattern to perform rule set updates in order to avoid race conditions; that is, get an
           etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
           version you are updating.
-        
+
         :returns: :class:`RuleSetResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        if name is not None: query['name'] = name
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           '/api/2.0/preview/accounts/access-control/rule-sets',
-                           query=query,
-                           headers=headers)
+        if etag is not None:
+            query["etag"] = etag
+        if name is not None:
+            query["name"] = name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/preview/accounts/access-control/rule-sets",
+            query=query,
+            headers=headers,
+        )
         return RuleSetResponse.from_dict(res)
 
     def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse:
         """Update a rule set.
-        
+
         Replace the rules of a rule set. First, use a GET rule set request to read the current version of the
         rule set before modifying it. This pattern helps prevent conflicts between concurrent updates.
-        
+
         :param name: str
           Name of the rule set.
         :param rule_set: :class:`RuleSetUpdateRequest`
-        
+
         :returns: :class:`RuleSetResponse`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if rule_set is not None: body['rule_set'] = rule_set.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if name is not None:
+            body["name"] = name
+        if rule_set is not None:
+            body["rule_set"] = rule_set.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           '/api/2.0/preview/accounts/access-control/rule-sets',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            "/api/2.0/preview/accounts/access-control/rule-sets",
+            body=body,
+            headers=headers,
+        )
         return RuleSetResponse.from_dict(res)
 
 
 class AccountGroupsAPI:
     """Groups simplify identity management, making it easier to assign access to Databricks account, data, and
     other securable objects.
-    
+
     It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
     instead of to users individually. All Databricks account identities can be assigned as members of groups,
     and members inherit permissions that are assigned to their group."""
@@ -1960,27 +2336,29 @@ class AccountGroupsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               members: Optional[List[ComplexValue]] = None,
-               meta: Optional[ResourceMeta] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[GroupSchema]] = None) -> Group:
+    def create(
+        self,
+        *,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        members: Optional[List[ComplexValue]] = None,
+        meta: Optional[ResourceMeta] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[GroupSchema]] = None,
+    ) -> Group:
         """Create a new group.
-        
+
         Creates a group in the Databricks account with a unique name, using the supplied group details.
-        
+
         :param display_name: str (optional)
           String that represents a human-readable group name
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
           values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -1993,75 +2371,97 @@ def create(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`GroupSchema`] (optional)
           The schema of the group.
-        
+
         :returns: :class:`Group`
         """
         body = {}
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if members is not None: body['members'] = [v.as_dict() for v in members]
-        if meta is not None: body['meta'] = meta.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups',
-                           body=body,
-                           headers=headers)
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if members is not None:
+            body["members"] = [v.as_dict() for v in members]
+        if meta is not None:
+            body["meta"] = meta.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups",
+            body=body,
+            headers=headers,
+        )
         return Group.from_dict(res)
 
     def delete(self, id: str):
         """Delete a group.
-        
+
         Deletes a group from the Databricks account.
-        
+
         :param id: str
           Unique ID for a group in the Databricks account.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> Group:
         """Get group details.
-        
+
         Gets the information for a specific group in the Databricks account.
-        
+
         :param id: str
           Unique ID for a group in the Databricks account.
-        
+
         :returns: :class:`Group`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}",
+            headers=headers,
+        )
         return Group.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[Group]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[Group]:
         """List group details.
-        
+
         Gets all details of the groups associated with the Databricks account.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -2073,7 +2473,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results.
@@ -2081,82 +2481,105 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`Group`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups',
-                                query=query,
-                                headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield Group.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update group details.
-        
+
         Partially updates the details of a group.
-        
+
         :param id: str
           Unique ID for a group in the Databricks account.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}',
-                     body=body,
-                     headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               members: Optional[List[ComplexValue]] = None,
-               meta: Optional[ResourceMeta] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[GroupSchema]] = None):
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}",
+            body=body,
+            headers=headers,
+        )
+
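
A hedged sketch of calling this method (not part of the patch), assuming the `Patch`, `PatchOp`, and `PatchSchema` helpers defined earlier in this module and hypothetical group and member IDs; the SCIM `value` payload shown is one accepted shape, not the only one:

from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()

# Add one member to an existing group; `schemas` must carry the PatchOp URN
# noted in the docstring above.
a.groups.patch(
    id="123456789",  # hypothetical group ID
    operations=[iam.Patch(op=iam.PatchOp.ADD, path="members", value=[{"value": "987654321"}])],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
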
+    def update(
+        self,
+        id: str,
+        *,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        members: Optional[List[ComplexValue]] = None,
+        meta: Optional[ResourceMeta] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[GroupSchema]] = None,
+    ):
         """Replace a group.
-        
+
         Updates the details of a group by replacing the entire group entity.
-        
+
         :param id: str
           Databricks group ID
         :param display_name: str (optional)
@@ -2164,7 +2587,7 @@ def update(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
           values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -2175,24 +2598,37 @@ def update(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`GroupSchema`] (optional)
           The schema of the group.
-        
-        
+
+
         """
         body = {}
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if members is not None: body['members'] = [v.as_dict() for v in members]
-        if meta is not None: body['meta'] = meta.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if members is not None:
+            body["members"] = [v.as_dict() for v in members]
+        if meta is not None:
+            body["meta"] = meta.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class AccountServicePrincipalsAPI:
@@ -2205,21 +2641,23 @@ class AccountServicePrincipalsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               active: Optional[bool] = None,
-               application_id: Optional[str] = None,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal:
+    def create(
+        self,
+        *,
+        active: Optional[bool] = None,
+        application_id: Optional[str] = None,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[ServicePrincipalSchema]] = None,
+    ) -> ServicePrincipal:
         """Create a service principal.
-        
+
         Creates a new service principal in the Databricks account.
-        
+
         :param active: bool (optional)
           If this service principal is active
         :param application_id: str (optional)
@@ -2229,7 +2667,7 @@ def create(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
           supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -2239,75 +2677,97 @@ def create(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
           The schema of the List response.
-        
+
         :returns: :class:`ServicePrincipal`
         """
         body = {}
-        if active is not None: body['active'] = active
-        if application_id is not None: body['applicationId'] = application_id
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals',
-                           body=body,
-                           headers=headers)
+        if active is not None:
+            body["active"] = active
+        if application_id is not None:
+            body["applicationId"] = application_id
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals",
+            body=body,
+            headers=headers,
+        )
         return ServicePrincipal.from_dict(res)
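
For orientation only (not part of this patch), a minimal create call, assuming the AccountClient entry point exposes this service as `service_principals` and using a hypothetical display name:

from databricks.sdk import AccountClient

a = AccountClient()

sp = a.service_principals.create(display_name="ci-automation")  # hypothetical name
print(sp.id, sp.application_id)  # application_id is generated by Databricks
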
 
     def delete(self, id: str):
         """Delete a service principal.
-        
+
         Delete a single service principal in the Databricks account.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks account.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> ServicePrincipal:
         """Get service principal details.
-        
+
         Gets the details for a single service principal defined in the Databricks account.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks account.
-        
+
         :returns: :class:`ServicePrincipal`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}",
+            headers=headers,
+        )
         return ServicePrincipal.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[ServicePrincipal]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[ServicePrincipal]:
         """List service principals.
-        
+
         Gets the set of service principals associated with a Databricks account.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -2319,7 +2779,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results.
@@ -2327,84 +2787,107 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`ServicePrincipal`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals',
-                                query=query,
-                                headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield ServicePrincipal.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update service principal details.
-        
+
         Partially updates the details of a single service principal in the Databricks account.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks account.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}',
-                     body=body,
-                     headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               active: Optional[bool] = None,
-               application_id: Optional[str] = None,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[ServicePrincipalSchema]] = None):
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}",
+            body=body,
+            headers=headers,
+        )
+
+    def update(
+        self,
+        id: str,
+        *,
+        active: Optional[bool] = None,
+        application_id: Optional[str] = None,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[ServicePrincipalSchema]] = None,
+    ):
         """Replace service principal.
-        
+
         Updates the details of a single service principal.
-        
+
         This action replaces the existing service principal with the same name.
-        
+
         :param id: str
           Databricks service principal ID.
         :param active: bool (optional)
@@ -2416,7 +2899,7 @@ def update(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
           supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -2424,29 +2907,42 @@ def update(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
           The schema of the List response.
-        
-        
+
+
         """
         body = {}
-        if active is not None: body['active'] = active
-        if application_id is not None: body['applicationId'] = application_id
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if active is not None:
+            body["active"] = active
+        if application_id is not None:
+            body["applicationId"] = application_id
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class AccountUsersAPI:
     """User identities recognized by Databricks and represented by email addresses.
-    
+
     Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
     provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your
     identity provider to create users and groups in your Databricks account and give them the proper level of
@@ -2458,37 +2954,39 @@ class AccountUsersAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               active: Optional[bool] = None,
-               display_name: Optional[str] = None,
-               emails: Optional[List[ComplexValue]] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               name: Optional[Name] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[UserSchema]] = None,
-               user_name: Optional[str] = None) -> User:
+    def create(
+        self,
+        *,
+        active: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        emails: Optional[List[ComplexValue]] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        name: Optional[Name] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[UserSchema]] = None,
+        user_name: Optional[str] = None,
+    ) -> User:
         """Create a new user.
-        
+
         Creates a new user in the Databricks account. This new user will also be added to the Databricks
         account.
-        
+
         :param active: bool (optional)
           If this user is active
         :param display_name: str (optional)
           String that represents a concatenation of given and family names. For example `John Smith`. This
           field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
           Account SCIM APIs to update `displayName`.
-          
+
           [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
         :param emails: List[:class:`ComplexValue`] (optional)
           All the emails associated with the Databricks user.
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
           External ID is not currently supported. It is reserved for future use.
@@ -2503,61 +3001,81 @@ def create(self,
           The schema of the user.
         :param user_name: str (optional)
           Email address of the Databricks user.
-        
+
         :returns: :class:`User`
         """
         body = {}
-        if active is not None: body['active'] = active
-        if display_name is not None: body['displayName'] = display_name
-        if emails is not None: body['emails'] = [v.as_dict() for v in emails]
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if name is not None: body['name'] = name.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        if user_name is not None: body['userName'] = user_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users',
-                           body=body,
-                           headers=headers)
+        if active is not None:
+            body["active"] = active
+        if display_name is not None:
+            body["displayName"] = display_name
+        if emails is not None:
+            body["emails"] = [v.as_dict() for v in emails]
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if name is not None:
+            body["name"] = name.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        if user_name is not None:
+            body["userName"] = user_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users",
+            body=body,
+            headers=headers,
+        )
         return User.from_dict(res)
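
A usage sketch (not part of this patch), assuming the AccountClient entry point exposes this service as `users`; the email address is a hypothetical example:

from databricks.sdk import AccountClient

a = AccountClient()

user = a.users.create(user_name="new.engineer@example.com", display_name="New Engineer")
print(user.id)
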
 
     def delete(self, id: str):
         """Delete a user.
-        
+
         Deletes a user. Deleting a user from a Databricks account also removes objects associated with the
         user.
-        
+
         :param id: str
           Unique ID for a user in the Databricks account.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}',
-                     headers=headers)
-
-    def get(self,
-            id: str,
-            *,
-            attributes: Optional[str] = None,
-            count: Optional[int] = None,
-            excluded_attributes: Optional[str] = None,
-            filter: Optional[str] = None,
-            sort_by: Optional[str] = None,
-            sort_order: Optional[GetSortOrder] = None,
-            start_index: Optional[int] = None) -> User:
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}",
+            headers=headers,
+        )
+
+    def get(
+        self,
+        id: str,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[GetSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> User:
         """Get user details.
-        
+
         Gets information for a specific user in the Databricks account.
-        
+
         :param id: str
           Unique ID for a user in the Databricks account.
         :param attributes: str (optional)
@@ -2571,7 +3089,7 @@ def get(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
@@ -2580,39 +3098,52 @@ def get(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: :class:`User`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}',
-                           query=query,
-                           headers=headers)
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}",
+            query=query,
+            headers=headers,
+        )
         return User.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[User]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[User]:
         """List users.
-        
+
         Gets details for all the users associated with a Databricks account.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -2624,7 +3155,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
@@ -2633,84 +3164,107 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`User`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users',
-                                query=query,
-                                headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield User.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update user details.
-        
+
         Partially updates a user resource by applying the supplied operations on specific user attributes.
-        
+
         :param id: str
           Unique ID for a user in the Databricks account.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}',
-                     body=body,
-                     headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               active: Optional[bool] = None,
-               display_name: Optional[str] = None,
-               emails: Optional[List[ComplexValue]] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               name: Optional[Name] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[UserSchema]] = None,
-               user_name: Optional[str] = None):
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}",
+            body=body,
+            headers=headers,
+        )
+
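A minimal sketch of a partial update through this method, assuming the PatchOp and PatchSchema enum member names generated elsewhere in this module; the user id is a placeholder. Only the attributes named in the operations change.

from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()

# Deactivate one account user via a SCIM PatchOp without resending the full record.
a.users.patch(
    id="1234567890",  # placeholder account user id
    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value=False)],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)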
+    def update(
+        self,
+        id: str,
+        *,
+        active: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        emails: Optional[List[ComplexValue]] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        name: Optional[Name] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[UserSchema]] = None,
+        user_name: Optional[str] = None,
+    ):
         """Replace a user.
-        
+
         Replaces a user's information with the data supplied in request.
-        
+
         :param id: str
           Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
           be ignored.
@@ -2720,13 +3274,13 @@ def update(self,
           String that represents a concatenation of given and family names. For example `John Smith`. This
           field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
           Account SCIM APIs to update `displayName`.
-          
+
           [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
         :param emails: List[:class:`ComplexValue`] (optional)
           All the emails associated with the Databricks user.
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
           External ID is not currently supported. It is reserved for future use.
@@ -2738,26 +3292,41 @@ def update(self,
           The schema of the user.
         :param user_name: str (optional)
           Email address of the Databricks user.
-        
-        
+
+
         """
         body = {}
-        if active is not None: body['active'] = active
-        if display_name is not None: body['displayName'] = display_name
-        if emails is not None: body['emails'] = [v.as_dict() for v in emails]
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if name is not None: body['name'] = name.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        if user_name is not None: body['userName'] = user_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if active is not None:
+            body["active"] = active
+        if display_name is not None:
+            body["displayName"] = display_name
+        if emails is not None:
+            body["emails"] = [v.as_dict() for v in emails]
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if name is not None:
+            body["name"] = name.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        if user_name is not None:
+            body["userName"] = user_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT',
-                     f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}",
+            body=body,
+            headers=headers,
+        )
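Because update issues a PUT, it replaces the entire user record: attributes omitted from the call are dropped. A sketch of the read-modify-write pattern this implies, with a placeholder id:

from databricks.sdk import AccountClient

a = AccountClient()

user = a.users.get(id="1234567890")  # placeholder account user id
# Resend the fields that should survive the replacement, changing only displayName.
a.users.update(
    id=user.id,
    user_name=user.user_name,
    display_name="Jane Q. Example",
    emails=user.emails,
    entitlements=user.entitlements,
    active=user.active,
)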
 
 
 class CurrentUserAPI:
@@ -2768,50 +3337,55 @@ def __init__(self, api_client):
 
     def me(self) -> User:
         """Get current user info.
-        
+
         Get details about the current method caller's identity.
-        
+
         :returns: :class:`User`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/preview/scim/v2/Me', headers=headers)
+        res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers)
         return User.from_dict(res)
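A minimal usage sketch, assuming a WorkspaceClient configured from the environment; me() is the cheapest way to confirm which identity the SDK authenticated as.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

me = w.current_user.me()
print(me.user_name, me.active)
print([g.display for g in me.groups or []])  # group memberships as ComplexValue entries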
 
 
 class GroupsAPI:
     """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and
     other securable objects.
-    
+
     It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
     instead of to users individually. All Databricks workspace identities can be assigned as members of
-    groups, and members inherit permissions that are assigned to their group."""
+    groups, and members inherit permissions that are assigned to their group.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               members: Optional[List[ComplexValue]] = None,
-               meta: Optional[ResourceMeta] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[GroupSchema]] = None) -> Group:
+    def create(
+        self,
+        *,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        members: Optional[List[ComplexValue]] = None,
+        meta: Optional[ResourceMeta] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[GroupSchema]] = None,
+    ) -> Group:
         """Create a new group.
-        
+
         Creates a group in the Databricks workspace with a unique name, using the supplied group details.
-        
+
         :param display_name: str (optional)
           String that represents a human-readable group name
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
           values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -2824,68 +3398,89 @@ def create(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`GroupSchema`] (optional)
           The schema of the group.
-        
+
         :returns: :class:`Group`
         """
         body = {}
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if members is not None: body['members'] = [v.as_dict() for v in members]
-        if meta is not None: body['meta'] = meta.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/scim/v2/Groups', body=body, headers=headers)
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if members is not None:
+            body["members"] = [v.as_dict() for v in members]
+        if meta is not None:
+            body["meta"] = meta.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/preview/scim/v2/Groups",
+            body=body,
+            headers=headers,
+        )
         return Group.from_dict(res)
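A minimal sketch of creating a workspace group and seeding it with one member, assuming a WorkspaceClient from the environment; the member id is a placeholder. Members are ComplexValue references keyed by the principal's id.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

group = w.groups.create(
    display_name="data-engineers",
    members=[iam.ComplexValue(value="<user-or-service-principal-id>")],  # placeholder id
)
print(group.id, group.display_name)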
 
     def delete(self, id: str):
         """Delete a group.
-        
+
         Deletes a group from the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a group in the Databricks workspace.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/preview/scim/v2/Groups/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers)
 
     def get(self, id: str) -> Group:
         """Get group details.
-        
+
         Gets the information for a specific group in the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a group in the Databricks workspace.
-        
+
         :returns: :class:`Group`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/preview/scim/v2/Groups/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers)
         return Group.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[Group]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[Group]:
         """List group details.
-        
+
         Gets all details of the groups associated with the Databricks workspace.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -2897,7 +3492,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results.
@@ -2905,76 +3500,105 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`Group`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET', '/api/2.0/preview/scim/v2/Groups', query=query, headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                "/api/2.0/preview/scim/v2/Groups",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield Group.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update group details.
-        
+
         Partially updates the details of a group.
-        
+
         :param id: str
           Unique ID for a group in the Databricks workspace.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', f'/api/2.0/preview/scim/v2/Groups/{id}', body=body, headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               members: Optional[List[ComplexValue]] = None,
-               meta: Optional[ResourceMeta] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[GroupSchema]] = None):
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/preview/scim/v2/Groups/{id}",
+            body=body,
+            headers=headers,
+        )
+
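A sketch of the common add-a-member case through this method, assuming the PatchOp/PatchSchema member names and that an ADD operation without a path may carry a dict of attributes (the usual SCIM form); the ids are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

w.groups.patch(
    id="<group-id>",  # placeholder group id
    operations=[iam.Patch(op=iam.PatchOp.ADD, value={"members": [{"value": "<user-id>"}]})],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)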
+    def update(
+        self,
+        id: str,
+        *,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        members: Optional[List[ComplexValue]] = None,
+        meta: Optional[ResourceMeta] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[GroupSchema]] = None,
+    ):
         """Replace a group.
-        
+
         Updates the details of a group by replacing the entire group entity.
-        
+
         :param id: str
           Databricks group ID
         :param display_name: str (optional)
@@ -2982,7 +3606,7 @@ def update(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
           values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -2993,21 +3617,37 @@ def update(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`GroupSchema`] (optional)
           The schema of the group.
-        
-        
+
+
         """
         body = {}
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if members is not None: body['members'] = [v.as_dict() for v in members]
-        if meta is not None: body['meta'] = meta.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if members is not None:
+            body["members"] = [v.as_dict() for v in members]
+        if meta is not None:
+            body["meta"] = meta.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT', f'/api/2.0/preview/scim/v2/Groups/{id}', body=body, headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/preview/scim/v2/Groups/{id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class PermissionMigrationAPI:
@@ -3016,14 +3656,16 @@ class PermissionMigrationAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def migrate_permissions(self,
-                            workspace_id: int,
-                            from_workspace_group_name: str,
-                            to_account_group_name: str,
-                            *,
-                            size: Optional[int] = None) -> MigratePermissionsResponse:
+    def migrate_permissions(
+        self,
+        workspace_id: int,
+        from_workspace_group_name: str,
+        to_account_group_name: str,
+        *,
+        size: Optional[int] = None,
+    ) -> MigratePermissionsResponse:
         """Migrate Permissions.
-        
+
         :param workspace_id: int
           WorkspaceId of the associated workspace where the permission migration will occur.
         :param from_workspace_group_name: str
@@ -3032,130 +3674,146 @@ def migrate_permissions(self,
           The name of the account group that permissions will be migrated to.
         :param size: int (optional)
           The maximum number of permissions that will be migrated.
-        
+
         :returns: :class:`MigratePermissionsResponse`
         """
         body = {}
         if from_workspace_group_name is not None:
-            body['from_workspace_group_name'] = from_workspace_group_name
-        if size is not None: body['size'] = size
-        if to_account_group_name is not None: body['to_account_group_name'] = to_account_group_name
-        if workspace_id is not None: body['workspace_id'] = workspace_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/permissionmigration', body=body, headers=headers)
+            body["from_workspace_group_name"] = from_workspace_group_name
+        if size is not None:
+            body["size"] = size
+        if to_account_group_name is not None:
+            body["to_account_group_name"] = to_account_group_name
+        if workspace_id is not None:
+            body["workspace_id"] = workspace_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers)
         return MigratePermissionsResponse.from_dict(res)
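A minimal sketch of a single migration call, assuming the client exposes this API as w.permission_migration and that a workspace-local group and an account group with these names already exist; the workspace id is a placeholder. The size cap allows large groups to be migrated in batches by repeating the call.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

resp = w.permission_migration.migrate_permissions(
    workspace_id=1234567890,                     # placeholder workspace id
    from_workspace_group_name="data-engineers",  # workspace-local group
    to_account_group_name="data-engineers",      # account group with the same name
    size=100,                                    # migrate at most 100 permissions per call
)
print(resp.as_dict())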
 
 
 class PermissionsAPI:
     """Permissions API are used to create read, write, edit, update and manage access for various users on
     different objects and endpoints.
-    
+
     * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
-    
+
     * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
     clusters.
-    
+
     * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use cluster
     policies.
-    
+
     * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
     manage, run, cancel, or own a Delta Live Tables pipeline.
-    
+
     * **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a
     job.
-    
+
     * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or
     manage MLflow experiments.
-    
+
     * **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can read, edit,
     or manage MLflow registered models.
-    
+
     * **[Password permissions](:service:users)** — Manage which users can use password login when SSO is
     enabled.
-    
+
     * **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or attach to
     pools.
-    
+
     * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
-    
+
     * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, query, or
     manage a serving endpoint.
-    
+
     * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage SQL
     warehouses.
-    
+
     * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
-    
+
     * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
     manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
-    
+
     For the mapping of the required permissions for specific actions or abilities and other important
     information, see [Access Control].
-    
+
     Note that to manage access control on service principals, use **[Account Access Control
     Proxy](:service:accountaccesscontrolproxy)**.
-    
-    [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html"""
+
+    [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions:
         """Get object permissions.
-        
+
         Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
         object.
-        
+
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
           cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
           jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
-        
+
         :returns: :class:`ObjectPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/{request_object_type}/{request_object_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/{request_object_type}/{request_object_id}",
+            headers=headers,
+        )
         return ObjectPermissions.from_dict(res)
 
-    def get_permission_levels(self, request_object_type: str,
-                              request_object_id: str) -> GetPermissionLevelsResponse:
+    def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse:
         """Get object permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param request_object_type: str
           
         :param request_object_id: str
           
-        
+
         :returns: :class:`GetPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels",
+            headers=headers,
+        )
         return GetPermissionLevelsResponse.from_dict(res)
 
-    def set(self,
-            request_object_type: str,
-            request_object_id: str,
-            *,
-            access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions:
+    def set(
+        self,
+        request_object_type: str,
+        request_object_id: str,
+        *,
+        access_control_list: Optional[List[AccessControlRequest]] = None,
+    ) -> ObjectPermissions:
         """Set object permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
-        
+
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
           cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
@@ -3163,30 +3821,37 @@ def set(self,
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
-        
+
         :returns: :class:`ObjectPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/{request_object_type}/{request_object_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/{request_object_type}/{request_object_id}",
+            body=body,
+            headers=headers,
+        )
         return ObjectPermissions.from_dict(res)
 
-    def update(self,
-               request_object_type: str,
-               request_object_id: str,
-               *,
-               access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions:
+    def update(
+        self,
+        request_object_type: str,
+        request_object_id: str,
+        *,
+        access_control_list: Optional[List[AccessControlRequest]] = None,
+    ) -> ObjectPermissions:
         """Update object permissions.
-        
+
         Updates the permissions on an object. Objects can inherit permissions from their parent objects or
         root object.
-        
+
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
           cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
@@ -3194,18 +3859,23 @@ def update(self,
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
-        
+
         :returns: :class:`ObjectPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/{request_object_type}/{request_object_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/{request_object_type}/{request_object_id}",
+            body=body,
+            headers=headers,
+        )
         return ObjectPermissions.from_dict(res)
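The practical difference between set and update: set (PUT) replaces all direct grants on the object, while update (PATCH) merges the supplied entries into what is already there. A sketch granting a group CAN_MANAGE on a cluster via the merging path, with placeholder ids and group name:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
cluster_id = "<cluster-id>"  # placeholder

# PATCH semantics: existing direct grants on the cluster are preserved.
w.permissions.update(
    request_object_type="clusters",
    request_object_id=cluster_id,
    access_control_list=[
        iam.AccessControlRequest(
            group_name="data-engineers",
            permission_level=iam.PermissionLevel.CAN_MANAGE,
        )
    ],
)

current = w.permissions.get(request_object_type="clusters", request_object_id=cluster_id)
for entry in current.access_control_list or []:
    print(entry.group_name or entry.user_name or entry.service_principal_name, entry.all_permissions)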
 
 
@@ -3219,21 +3889,23 @@ class ServicePrincipalsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               active: Optional[bool] = None,
-               application_id: Optional[str] = None,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal:
+    def create(
+        self,
+        *,
+        active: Optional[bool] = None,
+        application_id: Optional[str] = None,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[ServicePrincipalSchema]] = None,
+    ) -> ServicePrincipal:
         """Create a service principal.
-        
+
         Creates a new service principal in the Databricks workspace.
-        
+
         :param active: bool (optional)
           If this user is active
         :param application_id: str (optional)
@@ -3243,7 +3915,7 @@ def create(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
           supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -3253,68 +3925,97 @@ def create(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
           The schema of the List response.
-        
+
         :returns: :class:`ServicePrincipal`
         """
         body = {}
-        if active is not None: body['active'] = active
-        if application_id is not None: body['applicationId'] = application_id
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/scim/v2/ServicePrincipals', body=body, headers=headers)
+        if active is not None:
+            body["active"] = active
+        if application_id is not None:
+            body["applicationId"] = application_id
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/preview/scim/v2/ServicePrincipals",
+            body=body,
+            headers=headers,
+        )
         return ServicePrincipal.from_dict(res)
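A minimal sketch of creating a service principal for automation and giving it a cluster-creation entitlement, assuming a WorkspaceClient from the environment; the display name is arbitrary and the entitlement string is one of the documented values.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()

sp = w.service_principals.create(
    display_name="etl-automation",
    entitlements=[iam.ComplexValue(value="allow-cluster-create")],
)
print(sp.id, sp.application_id)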
 
     def delete(self, id: str):
         """Delete a service principal.
-        
+
         Delete a single service principal in the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks workspace.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> ServicePrincipal:
         """Get service principal details.
-        
+
         Gets the details for a single service principal defined in the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks workspace.
-        
+
         :returns: :class:`ServicePrincipal`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}",
+            headers=headers,
+        )
         return ServicePrincipal.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[ServicePrincipal]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[ServicePrincipal]:
         """List service principals.
-        
+
         Gets the set of service principals associated with a Databricks workspace.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -3326,7 +4027,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results.
@@ -3334,81 +4035,107 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`ServicePrincipal`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/preview/scim/v2/ServicePrincipals',
-                                query=query,
-                                headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                "/api/2.0/preview/scim/v2/ServicePrincipals",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield ServicePrincipal.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update service principal details.
-        
+
         Partially updates the details of a single service principal in the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a service principal in the Databricks workspace.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body, headers=headers)
-
-    def update(self,
-               id: str,
-               *,
-               active: Optional[bool] = None,
-               application_id: Optional[str] = None,
-               display_name: Optional[str] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[ServicePrincipalSchema]] = None):
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}",
+            body=body,
+            headers=headers,
+        )
+
+    def update(
+        self,
+        id: str,
+        *,
+        active: Optional[bool] = None,
+        application_id: Optional[str] = None,
+        display_name: Optional[str] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[ServicePrincipalSchema]] = None,
+    ):
         """Replace service principal.
-        
+
         Updates the details of a single service principal.
-        
+
         This action replaces the existing service principal with the same name.
-        
+
         :param id: str
           Databricks service principal ID.
         :param active: bool (optional)
@@ -3420,7 +4147,7 @@ def update(self,
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
           supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
         :param groups: List[:class:`ComplexValue`] (optional)
@@ -3428,26 +4155,42 @@ def update(self,
           Corresponds to AWS instance profile/arn role.
         :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
           The schema of the List response.
-        
-        
+
+
         """
         body = {}
-        if active is not None: body['active'] = active
-        if application_id is not None: body['applicationId'] = application_id
-        if display_name is not None: body['displayName'] = display_name
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if active is not None:
+            body["active"] = active
+        if application_id is not None:
+            body["applicationId"] = application_id
+        if display_name is not None:
+            body["displayName"] = display_name
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT', f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body, headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class UsersAPI:
     """User identities recognized by Databricks and represented by email addresses.
-    
+
     Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
     provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your
     identity provider to create users and groups in Databricks workspace and give them the proper level of
@@ -3459,37 +4202,39 @@ class UsersAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               active: Optional[bool] = None,
-               display_name: Optional[str] = None,
-               emails: Optional[List[ComplexValue]] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               id: Optional[str] = None,
-               name: Optional[Name] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[UserSchema]] = None,
-               user_name: Optional[str] = None) -> User:
+    def create(
+        self,
+        *,
+        active: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        emails: Optional[List[ComplexValue]] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        id: Optional[str] = None,
+        name: Optional[Name] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[UserSchema]] = None,
+        user_name: Optional[str] = None,
+    ) -> User:
         """Create a new user.
-        
+
         Creates a new user in the Databricks workspace. This new user will also be added to the Databricks
         account.
-        
+
         :param active: bool (optional)
           If this user is active
         :param display_name: str (optional)
           String that represents a concatenation of given and family names. For example `John Smith`. This
           field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
           Account SCIM APIs to update `displayName`.
-          
+
           [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
         :param emails: List[:class:`ComplexValue`] (optional)
           All the emails associated with the Databricks user.
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
           External ID is not currently supported. It is reserved for future use.
@@ -3504,56 +4249,77 @@ def create(self,
           The schema of the user.
         :param user_name: str (optional)
           Email address of the Databricks user.
-        
+
         :returns: :class:`User`
         """
         body = {}
-        if active is not None: body['active'] = active
-        if display_name is not None: body['displayName'] = display_name
-        if emails is not None: body['emails'] = [v.as_dict() for v in emails]
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if id is not None: body['id'] = id
-        if name is not None: body['name'] = name.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        if user_name is not None: body['userName'] = user_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/scim/v2/Users', body=body, headers=headers)
+        if active is not None:
+            body["active"] = active
+        if display_name is not None:
+            body["displayName"] = display_name
+        if emails is not None:
+            body["emails"] = [v.as_dict() for v in emails]
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if id is not None:
+            body["id"] = id
+        if name is not None:
+            body["name"] = name.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        if user_name is not None:
+            body["userName"] = user_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/preview/scim/v2/Users",
+            body=body,
+            headers=headers,
+        )
         return User.from_dict(res)
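A minimal sketch of provisioning a workspace user (which, per the docstring above, also adds the user to the account), assuming a WorkspaceClient from the environment; the email address is a placeholder.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

user = w.users.create(
    user_name="jane.doe@example.com",  # must be the user's email address
    display_name="Jane Doe",
)
print(user.id)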
 
     def delete(self, id: str):
         """Delete a user.
-        
+
         Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the
         user.
-        
+
         :param id: str
           Unique ID for a user in the Databricks workspace.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/preview/scim/v2/Users/{id}', headers=headers)
-
-    def get(self,
-            id: str,
-            *,
-            attributes: Optional[str] = None,
-            count: Optional[int] = None,
-            excluded_attributes: Optional[str] = None,
-            filter: Optional[str] = None,
-            sort_by: Optional[str] = None,
-            sort_order: Optional[GetSortOrder] = None,
-            start_index: Optional[int] = None) -> User:
+        self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Users/{id}", headers=headers)
+
+    def get(
+        self,
+        id: str,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[GetSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> User:
         """Get user details.
-        
+
         Gets information for a specific user in the Databricks workspace.
-        
+
         :param id: str
           Unique ID for a user in the Databricks workspace.
         :param attributes: str (optional)
@@ -3567,7 +4333,7 @@ def get(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
@@ -3576,64 +4342,90 @@ def get(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: :class:`User`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.0/preview/scim/v2/Users/{id}', query=query, headers=headers)
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/preview/scim/v2/Users/{id}",
+            query=query,
+            headers=headers,
+        )
         return User.from_dict(res)
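# Usage sketch for get(): assumes a configured WorkspaceClient; the user id is
# a hypothetical placeholder. Limiting `attributes` trims the SCIM payload.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
user = w.users.get(id="1234567890", attributes="id,userName,active")
print(user.user_name, user.active)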
 
     def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse:
         """Get password permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :returns: :class:`GetPasswordPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/permissions/authorization/passwords/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/permissions/authorization/passwords/permissionLevels",
+            headers=headers,
+        )
         return GetPasswordPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self) -> PasswordPermissions:
         """Get password permissions.
-        
+
         Gets the permissions of all passwords. Passwords can inherit permissions from their root object.
-        
+
         :returns: :class:`PasswordPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/permissions/authorization/passwords', headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/permissions/authorization/passwords",
+            headers=headers,
+        )
         return PasswordPermissions.from_dict(res)
 
-    def list(self,
-             *,
-             attributes: Optional[str] = None,
-             count: Optional[int] = None,
-             excluded_attributes: Optional[str] = None,
-             filter: Optional[str] = None,
-             sort_by: Optional[str] = None,
-             sort_order: Optional[ListSortOrder] = None,
-             start_index: Optional[int] = None) -> Iterator[User]:
+    def list(
+        self,
+        *,
+        attributes: Optional[str] = None,
+        count: Optional[int] = None,
+        excluded_attributes: Optional[str] = None,
+        filter: Optional[str] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[ListSortOrder] = None,
+        start_index: Optional[int] = None,
+    ) -> Iterator[User]:
         """List users.
-        
+
         Gets details for all the users associated with a Databricks workspace.
-        
+
         :param attributes: str (optional)
           Comma-separated list of attributes to return in response.
         :param count: int (optional)
@@ -3645,7 +4437,7 @@ def list(self,
           contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
           formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
           only support simple expressions.
-          
+
           [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
         :param sort_by: str (optional)
           Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
@@ -3654,99 +4446,137 @@ def list(self,
           The order to sort the results.
         :param start_index: int (optional)
           Specifies the index of the first result. First item is number 1.
-        
+
         :returns: Iterator over :class:`User`
         """
 
         query = {}
-        if attributes is not None: query['attributes'] = attributes
-        if count is not None: query['count'] = count
-        if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes
-        if filter is not None: query['filter'] = filter
-        if sort_by is not None: query['sortBy'] = sort_by
-        if sort_order is not None: query['sortOrder'] = sort_order.value
-        if start_index is not None: query['startIndex'] = start_index
-        headers = {'Accept': 'application/json', }
+        if attributes is not None:
+            query["attributes"] = attributes
+        if count is not None:
+            query["count"] = count
+        if excluded_attributes is not None:
+            query["excludedAttributes"] = excluded_attributes
+        if filter is not None:
+            query["filter"] = filter
+        if sort_by is not None:
+            query["sortBy"] = sort_by
+        if sort_order is not None:
+            query["sortOrder"] = sort_order.value
+        if start_index is not None:
+            query["startIndex"] = start_index
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['startIndex'] = 1
-        if "count" not in query: query['count'] = 100
+        query["startIndex"] = 1
+        if "count" not in query:
+            query["count"] = 100
         while True:
-            json = self._api.do('GET', '/api/2.0/preview/scim/v2/Users', query=query, headers=headers)
-            if 'Resources' in json:
-                for v in json['Resources']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                "/api/2.0/preview/scim/v2/Users",
+                query=query,
+                headers=headers,
+            )
+            if "Resources" in json:
+                for v in json["Resources"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield User.from_dict(v)
-            if 'Resources' not in json or not json['Resources']:
+            if "Resources" not in json or not json["Resources"]:
                 return
-            query['startIndex'] += len(json['Resources'])
-
-    def patch(self,
-              id: str,
-              *,
-              operations: Optional[List[Patch]] = None,
-              schemas: Optional[List[PatchSchema]] = None):
+            query["startIndex"] += len(json["Resources"])
+
+    def patch(
+        self,
+        id: str,
+        *,
+        operations: Optional[List[Patch]] = None,
+        schemas: Optional[List[PatchSchema]] = None,
+    ):
         """Update user details.
-        
+
         Partially updates a user resource by applying the supplied operations on specific user attributes.
-        
+
         :param id: str
           Unique ID for a user in the Databricks workspace.
         :param operations: List[:class:`Patch`] (optional)
         :param schemas: List[:class:`PatchSchema`] (optional)
           The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
+
+
         """
         body = {}
-        if operations is not None: body['Operations'] = [v.as_dict() for v in operations]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if operations is not None:
+            body["Operations"] = [v.as_dict() for v in operations]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH', f'/api/2.0/preview/scim/v2/Users/{id}', body=body, headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/preview/scim/v2/Users/{id}",
+            body=body,
+            headers=headers,
+        )
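# Usage sketch for patch(): assumes a configured WorkspaceClient; the user id
# is a hypothetical placeholder. A single SCIM PatchOp flips the active flag.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
w.users.patch(
    id="1234567890",
    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)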
 
     def set_permissions(
-            self,
-            *,
-            access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions:
+        self,
+        *,
+        access_control_list: Optional[List[PasswordAccessControlRequest]] = None,
+    ) -> PasswordPermissions:
         """Set password permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-        
+
         :returns: :class:`PasswordPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', '/api/2.0/permissions/authorization/passwords', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            "/api/2.0/permissions/authorization/passwords",
+            body=body,
+            headers=headers,
+        )
         return PasswordPermissions.from_dict(res)
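# Usage sketch for set_permissions(): assumes a configured WorkspaceClient;
# the group name is a hypothetical placeholder. This replaces all direct
# password permissions with a single CAN_USE grant.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
w.users.set_permissions(
    access_control_list=[
        iam.PasswordAccessControlRequest(
            group_name="admins",
            permission_level=iam.PasswordPermissionLevel.CAN_USE,
        )
    ]
)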
 
-    def update(self,
-               id: str,
-               *,
-               active: Optional[bool] = None,
-               display_name: Optional[str] = None,
-               emails: Optional[List[ComplexValue]] = None,
-               entitlements: Optional[List[ComplexValue]] = None,
-               external_id: Optional[str] = None,
-               groups: Optional[List[ComplexValue]] = None,
-               name: Optional[Name] = None,
-               roles: Optional[List[ComplexValue]] = None,
-               schemas: Optional[List[UserSchema]] = None,
-               user_name: Optional[str] = None):
+    def update(
+        self,
+        id: str,
+        *,
+        active: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        emails: Optional[List[ComplexValue]] = None,
+        entitlements: Optional[List[ComplexValue]] = None,
+        external_id: Optional[str] = None,
+        groups: Optional[List[ComplexValue]] = None,
+        name: Optional[Name] = None,
+        roles: Optional[List[ComplexValue]] = None,
+        schemas: Optional[List[UserSchema]] = None,
+        user_name: Optional[str] = None,
+    ):
         """Replace a user.
-        
+
         Replaces a user's information with the data supplied in request.
-        
+
         :param id: str
           Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
           be ignored.
@@ -3756,13 +4586,13 @@ def update(self,
           String that represents a concatenation of given and family names. For example `John Smith`. This
           field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
           Account SCIM APIs to update `displayName`.
-          
+
           [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
         :param emails: List[:class:`ComplexValue`] (optional)
           All the emails associated with the Databricks user.
         :param entitlements: List[:class:`ComplexValue`] (optional)
           Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
+
           [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
         :param external_id: str (optional)
           External ID is not currently supported. It is reserved for future use.
@@ -3774,45 +4604,69 @@ def update(self,
           The schema of the user.
         :param user_name: str (optional)
           Email address of the Databricks user.
-        
-        
+
+
         """
         body = {}
-        if active is not None: body['active'] = active
-        if display_name is not None: body['displayName'] = display_name
-        if emails is not None: body['emails'] = [v.as_dict() for v in emails]
-        if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements]
-        if external_id is not None: body['externalId'] = external_id
-        if groups is not None: body['groups'] = [v.as_dict() for v in groups]
-        if name is not None: body['name'] = name.as_dict()
-        if roles is not None: body['roles'] = [v.as_dict() for v in roles]
-        if schemas is not None: body['schemas'] = [v.value for v in schemas]
-        if user_name is not None: body['userName'] = user_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if active is not None:
+            body["active"] = active
+        if display_name is not None:
+            body["displayName"] = display_name
+        if emails is not None:
+            body["emails"] = [v.as_dict() for v in emails]
+        if entitlements is not None:
+            body["entitlements"] = [v.as_dict() for v in entitlements]
+        if external_id is not None:
+            body["externalId"] = external_id
+        if groups is not None:
+            body["groups"] = [v.as_dict() for v in groups]
+        if name is not None:
+            body["name"] = name.as_dict()
+        if roles is not None:
+            body["roles"] = [v.as_dict() for v in roles]
+        if schemas is not None:
+            body["schemas"] = [v.value for v in schemas]
+        if user_name is not None:
+            body["userName"] = user_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT', f'/api/2.0/preview/scim/v2/Users/{id}', body=body, headers=headers)
+        self._api.do(
+            "PUT",
+            f"/api/2.0/preview/scim/v2/Users/{id}",
+            body=body,
+            headers=headers,
+        )
 
     def update_permissions(
-            self,
-            *,
-            access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions:
+        self,
+        *,
+        access_control_list: Optional[List[PasswordAccessControlRequest]] = None,
+    ) -> PasswordPermissions:
         """Update password permissions.
-        
+
         Updates the permissions on all passwords. Passwords can inherit permissions from their root object.
-        
+
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-        
+
         :returns: :class:`PasswordPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/permissions/authorization/passwords',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/permissions/authorization/passwords",
+            body=body,
+            headers=headers,
+        )
         return PasswordPermissions.from_dict(res)
 
 
@@ -3825,74 +4679,85 @@ def __init__(self, api_client):
 
     def delete(self, workspace_id: int, principal_id: int):
         """Delete permissions assignment.
-        
+
         Deletes the workspace permissions assignment in a given account and workspace for the specified
         principal.
-        
+
         :param workspace_id: int
           The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}",
+            headers=headers,
+        )
 
     def get(self, workspace_id: int) -> WorkspacePermissions:
         """List workspace permissions.
-        
+
         Get an array of workspace permissions for the specified account and workspace.
-        
+
         :param workspace_id: int
           The workspace ID.
-        
+
         :returns: :class:`WorkspacePermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions",
+            headers=headers,
+        )
         return WorkspacePermissions.from_dict(res)
 
     def list(self, workspace_id: int) -> Iterator[PermissionAssignment]:
         """Get permission assignments.
-        
+
         Get the permission assignments for the specified Databricks account and Databricks workspace.
-        
+
         :param workspace_id: int
           The workspace ID for the account.
-        
+
         :returns: Iterator over :class:`PermissionAssignment`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         json = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments",
+            headers=headers,
+        )
         parsed = PermissionAssignments.from_dict(json).permission_assignments
         return parsed if parsed is not None else []
 
-    def update(self,
-               workspace_id: int,
-               principal_id: int,
-               *,
-               permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment:
+    def update(
+        self,
+        workspace_id: int,
+        principal_id: int,
+        *,
+        permissions: Optional[List[WorkspacePermission]] = None,
+    ) -> PermissionAssignment:
         """Create or update permissions assignment.
-        
+
         Creates or updates the workspace permissions assignment in a given account and workspace for the
         specified principal.
-        
+
         :param workspace_id: int
           The workspace ID.
         :param principal_id: int
@@ -3903,16 +4768,21 @@ def update(self,
           will be ignored. Note that excluding this field, or providing unsupported values, will have the same
           effect as providing an empty list, which will result in the deletion of all permissions for the
           principal.
-        
+
         :returns: :class:`PermissionAssignment`
         """
         body = {}
-        if permissions is not None: body['permissions'] = [v.value for v in permissions]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if permissions is not None:
+            body["permissions"] = [v.value for v in permissions]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PUT',
-            f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}',
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return PermissionAssignment.from_dict(res)
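# Usage sketch for workspace permission assignments: assumes account-level
# auth is configured; workspace and principal ids are hypothetical placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()
assignment = a.workspace_assignment.update(
    workspace_id=1234567890,
    principal_id=9876543210,
    permissions=[iam.WorkspacePermission.USER],
)
print(assignment.permissions)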
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 6cc2e4213..ea66f8ce6 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import compute
 
@@ -50,36 +50,48 @@ class BaseJob:
     def as_dict(self) -> dict:
         """Serializes the BaseJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_time is not None: body['created_time'] = self.created_time
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.created_time is not None:
+            body["created_time"] = self.created_time
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.settings: body['settings'] = self.settings.as_dict()
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BaseJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_time is not None: body['created_time'] = self.created_time
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.created_time is not None:
+            body["created_time"] = self.created_time
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.settings: body['settings'] = self.settings
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.settings:
+            body["settings"] = self.settings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         """Deserializes the BaseJob from a dictionary."""
-        return cls(created_time=d.get('created_time', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
-                   has_more=d.get('has_more', None),
-                   job_id=d.get('job_id', None),
-                   settings=_from_dict(d, 'settings', JobSettings))
+        return cls(
+            created_time=d.get("created_time", None),
+            creator_user_name=d.get("creator_user_name", None),
+            effective_budget_policy_id=d.get("effective_budget_policy_id", None),
+            has_more=d.get("has_more", None),
+            job_id=d.get("job_id", None),
+            settings=_from_dict(d, "settings", JobSettings),
+        )
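# Round-trip sketch for the dataclass above: from_dict() tolerates missing
# keys (they become None) and as_dict() emits only the keys that are set, so
# the reformatting does not change the serialized shape.
from databricks.sdk.service.jobs import BaseJob

payload = {"job_id": 1234, "creator_user_name": "someone@example.com"}
job = BaseJob.from_dict(payload)
assert job.job_id == 1234 and job.settings is None
assert job.as_dict() == payload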
 
 
 @dataclass
@@ -240,119 +252,183 @@ class BaseRun:
     def as_dict(self) -> dict:
         """Serializes the BaseRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
-        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.description is not None: body['description'] = self.description
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance.as_dict()
+        if self.cluster_spec:
+            body["cluster_spec"] = self.cluster_spec.as_dict()
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.description is not None:
+            body["description"] = self.description
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target.value
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+            body["effective_performance_target"] = self.effective_performance_target.value
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_clusters:
+            body["job_clusters"] = [v.as_dict() for v in self.job_clusters]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
         if self.original_attempt_run_id is not None:
-            body['original_attempt_run_id'] = self.original_attempt_run_id
-        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.run_type is not None: body['run_type'] = self.run_type.value
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
-        if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
-        if self.trigger is not None: body['trigger'] = self.trigger.value
-        if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
+            body["original_attempt_run_id"] = self.original_attempt_run_id
+        if self.overriding_parameters:
+            body["overriding_parameters"] = self.overriding_parameters.as_dict()
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.repair_history:
+            body["repair_history"] = [v.as_dict() for v in self.repair_history]
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.run_type is not None:
+            body["run_type"] = self.run_type.value
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.tasks:
+            body["tasks"] = [v.as_dict() for v in self.tasks]
+        if self.trigger is not None:
+            body["trigger"] = self.trigger.value
+        if self.trigger_info:
+            body["trigger_info"] = self.trigger_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BaseRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
-        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.description is not None: body['description'] = self.description
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance
+        if self.cluster_spec:
+            body["cluster_spec"] = self.cluster_spec
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.description is not None:
+            body["description"] = self.description
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.git_source: body['git_source'] = self.git_source
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_clusters: body['job_clusters'] = self.job_clusters
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+            body["effective_performance_target"] = self.effective_performance_target
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_clusters:
+            body["job_clusters"] = self.job_clusters
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
         if self.original_attempt_run_id is not None:
-            body['original_attempt_run_id'] = self.original_attempt_run_id
-        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.repair_history: body['repair_history'] = self.repair_history
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.run_type is not None: body['run_type'] = self.run_type
-        if self.schedule: body['schedule'] = self.schedule
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state
-        if self.status: body['status'] = self.status
-        if self.tasks: body['tasks'] = self.tasks
-        if self.trigger is not None: body['trigger'] = self.trigger
-        if self.trigger_info: body['trigger_info'] = self.trigger_info
+            body["original_attempt_run_id"] = self.original_attempt_run_id
+        if self.overriding_parameters:
+            body["overriding_parameters"] = self.overriding_parameters
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.repair_history:
+            body["repair_history"] = self.repair_history
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.run_type is not None:
+            body["run_type"] = self.run_type
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state
+        if self.status:
+            body["status"] = self.status
+        if self.tasks:
+            body["tasks"] = self.tasks
+        if self.trigger is not None:
+            body["trigger"] = self.trigger
+        if self.trigger_info:
+            body["trigger_info"] = self.trigger_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseRun:
         """Deserializes the BaseRun from a dictionary."""
-        return cls(attempt_number=d.get('attempt_number', None),
-                   cleanup_duration=d.get('cleanup_duration', None),
-                   cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
-                   cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
-                   creator_user_name=d.get('creator_user_name', None),
-                   description=d.get('description', None),
-                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
-                   end_time=d.get('end_time', None),
-                   execution_duration=d.get('execution_duration', None),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   has_more=d.get('has_more', None),
-                   job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
-                   job_id=d.get('job_id', None),
-                   job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
-                   job_run_id=d.get('job_run_id', None),
-                   number_in_job=d.get('number_in_job', None),
-                   original_attempt_run_id=d.get('original_attempt_run_id', None),
-                   overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   queue_duration=d.get('queue_duration', None),
-                   repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
-                   run_duration=d.get('run_duration', None),
-                   run_id=d.get('run_id', None),
-                   run_name=d.get('run_name', None),
-                   run_page_url=d.get('run_page_url', None),
-                   run_type=_enum(d, 'run_type', RunType),
-                   schedule=_from_dict(d, 'schedule', CronSchedule),
-                   setup_duration=d.get('setup_duration', None),
-                   start_time=d.get('start_time', None),
-                   state=_from_dict(d, 'state', RunState),
-                   status=_from_dict(d, 'status', RunStatus),
-                   tasks=_repeated_dict(d, 'tasks', RunTask),
-                   trigger=_enum(d, 'trigger', TriggerType),
-                   trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
+        return cls(
+            attempt_number=d.get("attempt_number", None),
+            cleanup_duration=d.get("cleanup_duration", None),
+            cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance),
+            cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec),
+            creator_user_name=d.get("creator_user_name", None),
+            description=d.get("description", None),
+            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
+            end_time=d.get("end_time", None),
+            execution_duration=d.get("execution_duration", None),
+            git_source=_from_dict(d, "git_source", GitSource),
+            has_more=d.get("has_more", None),
+            job_clusters=_repeated_dict(d, "job_clusters", JobCluster),
+            job_id=d.get("job_id", None),
+            job_parameters=_repeated_dict(d, "job_parameters", JobParameter),
+            job_run_id=d.get("job_run_id", None),
+            number_in_job=d.get("number_in_job", None),
+            original_attempt_run_id=d.get("original_attempt_run_id", None),
+            overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters),
+            queue_duration=d.get("queue_duration", None),
+            repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem),
+            run_duration=d.get("run_duration", None),
+            run_id=d.get("run_id", None),
+            run_name=d.get("run_name", None),
+            run_page_url=d.get("run_page_url", None),
+            run_type=_enum(d, "run_type", RunType),
+            schedule=_from_dict(d, "schedule", CronSchedule),
+            setup_duration=d.get("setup_duration", None),
+            start_time=d.get("start_time", None),
+            state=_from_dict(d, "state", RunState),
+            status=_from_dict(d, "status", RunStatus),
+            tasks=_repeated_dict(d, "tasks", RunTask),
+            trigger=_enum(d, "trigger", TriggerType),
+            trigger_info=_from_dict(d, "trigger_info", TriggerInfo),
+        )
 
 
 @dataclass
@@ -367,21 +443,28 @@ class CancelAllRuns:
     def as_dict(self) -> dict:
         """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.all_queued_runs is not None:
+            body["all_queued_runs"] = self.all_queued_runs
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.all_queued_runs is not None:
+            body["all_queued_runs"] = self.all_queued_runs
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelAllRuns:
         """Deserializes the CancelAllRuns from a dictionary."""
-        return cls(all_queued_runs=d.get('all_queued_runs', None), job_id=d.get('job_id', None))
+        return cls(
+            all_queued_runs=d.get("all_queued_runs", None),
+            job_id=d.get("job_id", None),
+        )
 
 
 @dataclass
@@ -411,19 +494,21 @@ class CancelRun:
     def as_dict(self) -> dict:
         """Serializes the CancelRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CancelRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRun:
         """Deserializes the CancelRun from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -449,35 +534,35 @@ class CleanRoomTaskRunLifeCycleState(Enum):
     """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to
     remove coupling with jobs API definition"""
 
-    BLOCKED = 'BLOCKED'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    PENDING = 'PENDING'
-    QUEUED = 'QUEUED'
-    RUNNING = 'RUNNING'
-    RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED'
-    SKIPPED = 'SKIPPED'
-    TERMINATED = 'TERMINATED'
-    TERMINATING = 'TERMINATING'
-    WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
+    BLOCKED = "BLOCKED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    PENDING = "PENDING"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    RUN_LIFE_CYCLE_STATE_UNSPECIFIED = "RUN_LIFE_CYCLE_STATE_UNSPECIFIED"
+    SKIPPED = "SKIPPED"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
+    WAITING_FOR_RETRY = "WAITING_FOR_RETRY"
 
 
 class CleanRoomTaskRunResultState(Enum):
     """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid
     cyclic dependency."""
 
-    CANCELED = 'CANCELED'
-    DISABLED = 'DISABLED'
-    EVICTED = 'EVICTED'
-    EXCLUDED = 'EXCLUDED'
-    FAILED = 'FAILED'
-    MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
-    RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED'
-    SUCCESS = 'SUCCESS'
-    SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
-    TIMEDOUT = 'TIMEDOUT'
-    UPSTREAM_CANCELED = 'UPSTREAM_CANCELED'
-    UPSTREAM_EVICTED = 'UPSTREAM_EVICTED'
-    UPSTREAM_FAILED = 'UPSTREAM_FAILED'
+    CANCELED = "CANCELED"
+    DISABLED = "DISABLED"
+    EVICTED = "EVICTED"
+    EXCLUDED = "EXCLUDED"
+    FAILED = "FAILED"
+    MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED"
+    RUN_RESULT_STATE_UNSPECIFIED = "RUN_RESULT_STATE_UNSPECIFIED"
+    SUCCESS = "SUCCESS"
+    SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES"
+    TIMEDOUT = "TIMEDOUT"
+    UPSTREAM_CANCELED = "UPSTREAM_CANCELED"
+    UPSTREAM_EVICTED = "UPSTREAM_EVICTED"
+    UPSTREAM_FAILED = "UPSTREAM_FAILED"
 
 
 @dataclass
@@ -494,22 +579,28 @@ class CleanRoomTaskRunState:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value
-        if self.result_state is not None: body['result_state'] = self.result_state.value
+        if self.life_cycle_state is not None:
+            body["life_cycle_state"] = self.life_cycle_state.value
+        if self.result_state is not None:
+            body["result_state"] = self.result_state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
-        if self.result_state is not None: body['result_state'] = self.result_state
+        if self.life_cycle_state is not None:
+            body["life_cycle_state"] = self.life_cycle_state
+        if self.result_state is not None:
+            body["result_state"] = self.result_state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomTaskRunState:
         """Deserializes the CleanRoomTaskRunState from a dictionary."""
-        return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState),
-                   result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState))
+        return cls(
+            life_cycle_state=_enum(d, "life_cycle_state", CleanRoomTaskRunLifeCycleState),
+            result_state=_enum(d, "result_state", CleanRoomTaskRunResultState),
+        )
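# Sketch of enum handling in the dataclass above: _enum() resolves the wire
# string to the matching member, and an absent key deserializes to None.
from databricks.sdk.service.jobs import (CleanRoomTaskRunLifeCycleState,
                                         CleanRoomTaskRunState)

state = CleanRoomTaskRunState.from_dict({"life_cycle_state": "RUNNING"})
assert state.life_cycle_state is CleanRoomTaskRunLifeCycleState.RUNNING
assert state.result_state is None
assert state.as_dict() == {"life_cycle_state": "RUNNING"}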
 
 
 @dataclass
@@ -530,28 +621,38 @@ class CleanRoomsNotebookTask:
     def as_dict(self) -> dict:
         """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.clean_room_name is not None:
+            body["clean_room_name"] = self.clean_room_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.notebook_base_parameters:
+            body["notebook_base_parameters"] = self.notebook_base_parameters
+        if self.notebook_name is not None:
+            body["notebook_name"] = self.notebook_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.clean_room_name is not None:
+            body["clean_room_name"] = self.clean_room_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.notebook_base_parameters:
+            body["notebook_base_parameters"] = self.notebook_base_parameters
+        if self.notebook_name is not None:
+            body["notebook_name"] = self.notebook_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTask:
         """Deserializes the CleanRoomsNotebookTask from a dictionary."""
-        return cls(clean_room_name=d.get('clean_room_name', None),
-                   etag=d.get('etag', None),
-                   notebook_base_parameters=d.get('notebook_base_parameters', None),
-                   notebook_name=d.get('notebook_name', None))
+        return cls(
+            clean_room_name=d.get("clean_room_name", None),
+            etag=d.get("etag", None),
+            notebook_base_parameters=d.get("notebook_base_parameters", None),
+            notebook_name=d.get("notebook_name", None),
+        )
 
 
 @dataclass
@@ -569,25 +670,32 @@ def as_dict(self) -> dict:
         """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.clean_room_job_run_state:
-            body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict()
-        if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
-        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict()
+            body["clean_room_job_run_state"] = self.clean_room_job_run_state.as_dict()
+        if self.notebook_output:
+            body["notebook_output"] = self.notebook_output.as_dict()
+        if self.output_schema_info:
+            body["output_schema_info"] = self.output_schema_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state
-        if self.notebook_output: body['notebook_output'] = self.notebook_output
-        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info
+        if self.clean_room_job_run_state:
+            body["clean_room_job_run_state"] = self.clean_room_job_run_state
+        if self.notebook_output:
+            body["notebook_output"] = self.notebook_output
+        if self.output_schema_info:
+            body["output_schema_info"] = self.output_schema_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
         """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary."""
-        return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState),
-                   notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
-                   output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo))
+        return cls(
+            clean_room_job_run_state=_from_dict(d, "clean_room_job_run_state", CleanRoomTaskRunState),
+            notebook_output=_from_dict(d, "notebook_output", NotebookOutput),
+            output_schema_info=_from_dict(d, "output_schema_info", OutputSchemaInfo),
+        )
 
 
 @dataclass
@@ -612,21 +720,28 @@ class ClusterInstance:
     def as_dict(self) -> dict:
         """Serializes the ClusterInstance into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterInstance:
         """Deserializes the ClusterInstance from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), spark_context_id=d.get('spark_context_id', None))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            spark_context_id=d.get("spark_context_id", None),
+        )
 
 
 @dataclass
@@ -650,34 +765,44 @@ class ClusterSpec:
     def as_dict(self) -> dict:
         """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = self.libraries
-        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
-        return cls(existing_cluster_id=d.get('existing_cluster_id', None),
-                   job_cluster_key=d.get('job_cluster_key', None),
-                   libraries=_repeated_dict(d, 'libraries', compute.Library),
-                   new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
+        return cls(
+            existing_cluster_id=d.get("existing_cluster_id", None),
+            job_cluster_key=d.get("job_cluster_key", None),
+            libraries=_repeated_dict(d, "libraries", compute.Library),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+        )
 
 
 class Condition(Enum):
 
-    ALL_UPDATED = 'ALL_UPDATED'
-    ANY_UPDATED = 'ANY_UPDATED'
+    ALL_UPDATED = "ALL_UPDATED"
+    ANY_UPDATED = "ANY_UPDATED"
 
 
 @dataclass
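
The ConditionTask hunk below serializes the operator enum via .value; a short sketch, reusing the string operands from the ConditionTaskOp docstring:

    from databricks.sdk.service import jobs

    cond = jobs.ConditionTask(
        left="10.0",
        op=jobs.ConditionTaskOp.GREATER_THAN_OR_EQUAL,
        right="12",
    )
    print(cond.as_dict())  # {'left': '10.0', 'op': 'GREATER_THAN_OR_EQUAL', 'right': '12'}
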
@@ -704,23 +829,33 @@ class ConditionTask:
     def as_dict(self) -> dict:
         """Serializes the ConditionTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.op is not None: body['op'] = self.op.value
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ConditionTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.op is not None: body['op'] = self.op
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.op is not None:
+            body["op"] = self.op
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConditionTask:
         """Deserializes the ConditionTask from a dictionary."""
-        return cls(left=d.get('left', None), op=_enum(d, 'op', ConditionTaskOp), right=d.get('right', None))
+        return cls(
+            left=d.get("left", None),
+            op=_enum(d, "op", ConditionTaskOp),
+            right=d.get("right", None),
+        )
 
 
 class ConditionTaskOp(Enum):
@@ -729,17 +864,17 @@ class ConditionTaskOp(Enum):
     `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands.
     `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to
     `false`.
-    
+
     The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`.
     If a task value was set to a boolean value, it will be serialized to `“true”` or
     `“false”` for the comparison."""
 
-    EQUAL_TO = 'EQUAL_TO'
-    GREATER_THAN = 'GREATER_THAN'
-    GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
-    LESS_THAN = 'LESS_THAN'
-    LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
-    NOT_EQUAL = 'NOT_EQUAL'
+    EQUAL_TO = "EQUAL_TO"
+    GREATER_THAN = "GREATER_THAN"
+    GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL"
+    LESS_THAN = "LESS_THAN"
+    LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL"
+    NOT_EQUAL = "NOT_EQUAL"
 
 
 @dataclass
@@ -750,19 +885,21 @@ class Continuous:
     def as_dict(self) -> dict:
         """Serializes the Continuous into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Continuous into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Continuous:
         """Deserializes the Continuous from a dictionary."""
-        return cls(pause_status=_enum(d, 'pause_status', PauseStatus))
+        return cls(pause_status=_enum(d, "pause_status", PauseStatus))
 
 
 @dataclass
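
The CreateJob hunk below only builds the JSON request body; a serialization-only sketch, assuming the Task and NotebookTask dataclasses from the same module (not shown in this hunk) and a placeholder notebook path:

    from databricks.sdk.service import jobs

    req = jobs.CreateJob(
        name="nightly-etl",
        max_concurrent_runs=1,
        tasks=[
            jobs.Task(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Workspace/etl/main"),  # placeholder path
            )
        ],
    )
    print(req.as_dict())  # nested dataclasses are serialized recursively via their own as_dict()
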
@@ -887,91 +1024,142 @@ def as_dict(self) -> dict:
         """Serializes the CreateJob into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.continuous: body['continuous'] = self.continuous.as_dict()
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.description is not None: body['description'] = self.description
-        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
-        if self.format is not None: body['format'] = self.format.value
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.health: body['health'] = self.health.as_dict()
-        if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
-        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
-        if self.name is not None: body['name'] = self.name
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
-        if self.queue: body['queue'] = self.queue.as_dict()
-        if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.tags: body['tags'] = self.tags
-        if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.continuous:
+            body["continuous"] = self.continuous.as_dict()
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.description is not None:
+            body["description"] = self.description
+        if self.edit_mode is not None:
+            body["edit_mode"] = self.edit_mode.value
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.environments:
+            body["environments"] = [v.as_dict() for v in self.environments]
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.job_clusters:
+            body["job_clusters"] = [v.as_dict() for v in self.job_clusters]
+        if self.max_concurrent_runs is not None:
+            body["max_concurrent_runs"] = self.max_concurrent_runs
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target.value
+        if self.queue:
+            body["queue"] = self.queue.as_dict()
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.tags:
+            body["tags"] = self.tags
+        if self.tasks:
+            body["tasks"] = [v.as_dict() for v in self.tasks]
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.continuous: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.description is not None: body['description'] = self.description
-        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.environments: body['environments'] = self.environments
-        if self.format is not None: body['format'] = self.format
-        if self.git_source: body['git_source'] = self.git_source
-        if self.health: body['health'] = self.health
-        if self.job_clusters: body['job_clusters'] = self.job_clusters
-        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
-        if self.name is not None: body['name'] = self.name
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.parameters: body['parameters'] = self.parameters
-        if self.performance_target is not None: body['performance_target'] = self.performance_target
-        if self.queue: body['queue'] = self.queue
-        if self.run_as: body['run_as'] = self.run_as
-        if self.schedule: body['schedule'] = self.schedule
-        if self.tags: body['tags'] = self.tags
-        if self.tasks: body['tasks'] = self.tasks
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.trigger: body['trigger'] = self.trigger
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.continuous:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.description is not None:
+            body["description"] = self.description
+        if self.edit_mode is not None:
+            body["edit_mode"] = self.edit_mode
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.environments:
+            body["environments"] = self.environments
+        if self.format is not None:
+            body["format"] = self.format
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.health:
+            body["health"] = self.health
+        if self.job_clusters:
+            body["job_clusters"] = self.job_clusters
+        if self.max_concurrent_runs is not None:
+            body["max_concurrent_runs"] = self.max_concurrent_runs
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target
+        if self.queue:
+            body["queue"] = self.queue
+        if self.run_as:
+            body["run_as"] = self.run_as
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.tags:
+            body["tags"] = self.tags
+        if self.tasks:
+            body["tasks"] = self.tasks
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.trigger:
+            body["trigger"] = self.trigger
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
-                   budget_policy_id=d.get('budget_policy_id', None),
-                   continuous=_from_dict(d, 'continuous', Continuous),
-                   deployment=_from_dict(d, 'deployment', JobDeployment),
-                   description=d.get('description', None),
-                   edit_mode=_enum(d, 'edit_mode', JobEditMode),
-                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
-                   environments=_repeated_dict(d, 'environments', JobEnvironment),
-                   format=_enum(d, 'format', Format),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   health=_from_dict(d, 'health', JobsHealthRules),
-                   job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
-                   max_concurrent_runs=d.get('max_concurrent_runs', None),
-                   name=d.get('name', None),
-                   notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
-                   parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
-                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
-                   queue=_from_dict(d, 'queue', QueueSettings),
-                   run_as=_from_dict(d, 'run_as', JobRunAs),
-                   schedule=_from_dict(d, 'schedule', CronSchedule),
-                   tags=d.get('tags', None),
-                   tasks=_repeated_dict(d, 'tasks', Task),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   trigger=_from_dict(d, 'trigger', TriggerSettings),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
+            budget_policy_id=d.get("budget_policy_id", None),
+            continuous=_from_dict(d, "continuous", Continuous),
+            deployment=_from_dict(d, "deployment", JobDeployment),
+            description=d.get("description", None),
+            edit_mode=_enum(d, "edit_mode", JobEditMode),
+            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
+            environments=_repeated_dict(d, "environments", JobEnvironment),
+            format=_enum(d, "format", Format),
+            git_source=_from_dict(d, "git_source", GitSource),
+            health=_from_dict(d, "health", JobsHealthRules),
+            job_clusters=_repeated_dict(d, "job_clusters", JobCluster),
+            max_concurrent_runs=d.get("max_concurrent_runs", None),
+            name=d.get("name", None),
+            notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings),
+            parameters=_repeated_dict(d, "parameters", JobParameterDefinition),
+            performance_target=_enum(d, "performance_target", PerformanceTarget),
+            queue=_from_dict(d, "queue", QueueSettings),
+            run_as=_from_dict(d, "run_as", JobRunAs),
+            schedule=_from_dict(d, "schedule", CronSchedule),
+            tags=d.get("tags", None),
+            tasks=_repeated_dict(d, "tasks", Task),
+            timeout_seconds=d.get("timeout_seconds", None),
+            trigger=_from_dict(d, "trigger", TriggerSettings),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
 
 
 @dataclass
@@ -984,19 +1172,21 @@ class CreateResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
-        return cls(job_id=d.get('job_id', None))
+        return cls(job_id=d.get("job_id", None))
 
 
 @dataclass
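
A sketch of the CronSchedule dataclass reformatted below, assuming an UNPAUSED member on the PauseStatus enum; the Quartz expression fires daily at 03:00:

    from databricks.sdk.service import jobs

    schedule = jobs.CronSchedule(
        quartz_cron_expression="0 0 3 * * ?",  # Quartz syntax
        timezone_id="UTC",
        pause_status=jobs.PauseStatus.UNPAUSED,
    )
    print(schedule.as_dict())  # pause_status is written as its .value, "UNPAUSED"
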
@@ -1019,27 +1209,33 @@ class CronSchedule:
     def as_dict(self) -> dict:
         """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
         if self.quartz_cron_expression is not None:
-            body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
-        return cls(pause_status=_enum(d, 'pause_status', PauseStatus),
-                   quartz_cron_expression=d.get('quartz_cron_expression', None),
-                   timezone_id=d.get('timezone_id', None))
+        return cls(
+            pause_status=_enum(d, "pause_status", PauseStatus),
+            quartz_cron_expression=d.get("quartz_cron_expression", None),
+            timezone_id=d.get("timezone_id", None),
+        )
 
 
 @dataclass
@@ -1054,22 +1250,28 @@ class DbtOutput:
     def as_dict(self) -> dict:
         """Serializes the DbtOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers
-        if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
+        if self.artifacts_headers:
+            body["artifacts_headers"] = self.artifacts_headers
+        if self.artifacts_link is not None:
+            body["artifacts_link"] = self.artifacts_link
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DbtOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers
-        if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
+        if self.artifacts_headers:
+            body["artifacts_headers"] = self.artifacts_headers
+        if self.artifacts_link is not None:
+            body["artifacts_link"] = self.artifacts_link
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtOutput:
         """Deserializes the DbtOutput from a dictionary."""
-        return cls(artifacts_headers=d.get('artifacts_headers', None),
-                   artifacts_link=d.get('artifacts_link', None))
+        return cls(
+            artifacts_headers=d.get("artifacts_headers", None),
+            artifacts_link=d.get("artifacts_link", None),
+        )
 
 
 @dataclass
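
A sketch of the DbtTask dataclass reformatted below; the project directory and warehouse id are placeholders:

    from databricks.sdk.service import jobs

    dbt = jobs.DbtTask(
        commands=["dbt deps", "dbt run"],
        project_directory="my_dbt_project",  # placeholder
        warehouse_id="1234567890abcdef",     # placeholder SQL warehouse id
    )
    print(dbt.as_dict())
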
@@ -1112,37 +1314,53 @@ class DbtTask:
     def as_dict(self) -> dict:
         """Serializes the DbtTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.commands: body['commands'] = [v for v in self.commands]
-        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
-        if self.project_directory is not None: body['project_directory'] = self.project_directory
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source.value
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.commands:
+            body["commands"] = [v for v in self.commands]
+        if self.profiles_directory is not None:
+            body["profiles_directory"] = self.profiles_directory
+        if self.project_directory is not None:
+            body["project_directory"] = self.project_directory
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source.value
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DbtTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.commands: body['commands'] = self.commands
-        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
-        if self.project_directory is not None: body['project_directory'] = self.project_directory
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.commands:
+            body["commands"] = self.commands
+        if self.profiles_directory is not None:
+            body["profiles_directory"] = self.profiles_directory
+        if self.project_directory is not None:
+            body["project_directory"] = self.project_directory
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtTask:
         """Deserializes the DbtTask from a dictionary."""
-        return cls(catalog=d.get('catalog', None),
-                   commands=d.get('commands', None),
-                   profiles_directory=d.get('profiles_directory', None),
-                   project_directory=d.get('project_directory', None),
-                   schema=d.get('schema', None),
-                   source=_enum(d, 'source', Source),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            catalog=d.get("catalog", None),
+            commands=d.get("commands", None),
+            profiles_directory=d.get("profiles_directory", None),
+            project_directory=d.get("project_directory", None),
+            schema=d.get("schema", None),
+            source=_enum(d, "source", Source),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -1153,19 +1371,21 @@ class DeleteJob:
     def as_dict(self) -> dict:
         """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteJob:
         """Deserializes the DeleteJob from a dictionary."""
-        return cls(job_id=d.get('job_id', None))
+        return cls(job_id=d.get("job_id", None))
 
 
 @dataclass
@@ -1195,19 +1415,21 @@ class DeleteRun:
     def as_dict(self) -> dict:
         """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -1250,25 +1472,33 @@ class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
         """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
-        return cls(field=d.get('field', None),
-                   new_value=d.get('new_value', None),
-                   previous_value=d.get('previous_value', None))
+        return cls(
+            field=d.get("field", None),
+            new_value=d.get("new_value", None),
+            previous_value=d.get("previous_value", None),
+        )
 
 
 @dataclass
@@ -1282,21 +1512,28 @@ class EnforcePolicyComplianceRequest:
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest:
         """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
-        return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None))
+        return cls(
+            job_id=d.get("job_id", None),
+            validate_only=d.get("validate_only", None),
+        )
 
 
 @dataclass
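
The EnforcePolicyComplianceResponse hunk below rebuilds the nested settings-change list with _repeated_dict; a sketch of deserializing a hand-written payload with placeholder values:

    from databricks.sdk.service import jobs

    resp = jobs.EnforcePolicyComplianceResponse.from_dict(
        {
            "has_changes": True,
            "job_cluster_changes": [
                {
                    "field": "spark_version",             # placeholder values throughout
                    "previous_value": "13.3.x-scala2.12",
                    "new_value": "15.4.x-scala2.12",
                }
            ],
        }
    )
    print(resp.job_cluster_changes[0].new_value)
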
@@ -1318,28 +1555,37 @@ class EnforcePolicyComplianceResponse:
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
         if self.job_cluster_changes:
-            body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes]
-        if self.settings: body['settings'] = self.settings.as_dict()
+            body["job_cluster_changes"] = [v.as_dict() for v in self.job_cluster_changes]
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
-        if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes
-        if self.settings: body['settings'] = self.settings
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
+        if self.job_cluster_changes:
+            body["job_cluster_changes"] = self.job_cluster_changes
+        if self.settings:
+            body["settings"] = self.settings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse:
         """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
-        return cls(has_changes=d.get('has_changes', None),
-                   job_cluster_changes=_repeated_dict(
-                       d, 'job_cluster_changes',
-                       EnforcePolicyComplianceForJobResponseJobClusterSettingsChange),
-                   settings=_from_dict(d, 'settings', JobSettings))
+        return cls(
+            has_changes=d.get("has_changes", None),
+            job_cluster_changes=_repeated_dict(
+                d,
+                "job_cluster_changes",
+                EnforcePolicyComplianceForJobResponseJobClusterSettingsChange,
+            ),
+            settings=_from_dict(d, "settings", JobSettings),
+        )
 
 
 @dataclass
@@ -1355,19 +1601,21 @@ class ExportRunOutput:
     def as_dict(self) -> dict:
         """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.views: body['views'] = [v.as_dict() for v in self.views]
+        if self.views:
+            body["views"] = [v.as_dict() for v in self.views]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.views: body['views'] = self.views
+        if self.views:
+            body["views"] = self.views
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportRunOutput:
         """Deserializes the ExportRunOutput from a dictionary."""
-        return cls(views=_repeated_dict(d, 'views', ViewItem))
+        return cls(views=_repeated_dict(d, "views", ViewItem))
 
 
 @dataclass
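
A sketch of the FileArrivalTriggerConfiguration dataclass reformatted below; the URL is a placeholder storage location to watch:

    from databricks.sdk.service import jobs

    trigger = jobs.FileArrivalTriggerConfiguration(
        url="/Volumes/main/raw/landing/",  # placeholder
        min_time_between_triggers_seconds=60,
    )
    print(trigger.as_dict())
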
@@ -1389,28 +1637,32 @@ def as_dict(self) -> dict:
         """Serializes the FileArrivalTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.min_time_between_triggers_seconds is not None:
-            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.url is not None: body['url'] = self.url
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.url is not None:
+            body["url"] = self.url
         if self.wait_after_last_change_seconds is not None:
-            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.min_time_between_triggers_seconds is not None:
-            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.url is not None: body['url'] = self.url
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.url is not None:
+            body["url"] = self.url
         if self.wait_after_last_change_seconds is not None:
-            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileArrivalTriggerConfiguration:
         """Deserializes the FileArrivalTriggerConfiguration from a dictionary."""
-        return cls(min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None),
-                   url=d.get('url', None),
-                   wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
+        return cls(
+            min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None),
+            url=d.get("url", None),
+            wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None),
+        )
 
 
 @dataclass
@@ -1425,22 +1677,27 @@ def as_dict(self) -> dict:
         """Serializes the ForEachStats into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.error_message_stats:
-            body['error_message_stats'] = [v.as_dict() for v in self.error_message_stats]
-        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
+            body["error_message_stats"] = [v.as_dict() for v in self.error_message_stats]
+        if self.task_run_stats:
+            body["task_run_stats"] = self.task_run_stats.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats
-        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats
+        if self.error_message_stats:
+            body["error_message_stats"] = self.error_message_stats
+        if self.task_run_stats:
+            body["task_run_stats"] = self.task_run_stats
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachStats:
         """Deserializes the ForEachStats from a dictionary."""
-        return cls(error_message_stats=_repeated_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats),
-                   task_run_stats=_from_dict(d, 'task_run_stats', ForEachTaskTaskRunStats))
+        return cls(
+            error_message_stats=_repeated_dict(d, "error_message_stats", ForEachTaskErrorMessageStats),
+            task_run_stats=_from_dict(d, "task_run_stats", ForEachTaskTaskRunStats),
+        )
 
 
 @dataclass
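
A sketch of the ForEachTask dataclass reformatted below, assuming the Task and NotebookTask dataclasses from the same module; the inputs string is passed through verbatim, commonly a JSON-encoded array:

    from databricks.sdk.service import jobs

    fan_out = jobs.ForEachTask(
        inputs='["2024-01-01", "2024-01-02"]',  # passed through as-is
        concurrency=2,
        task=jobs.Task(
            task_key="per_date",
            notebook_task=jobs.NotebookTask(notebook_path="/Workspace/etl/backfill"),  # placeholder path
        ),
    )
    print(fan_out.as_dict())  # the nested task is serialized with its own as_dict()
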
@@ -1458,25 +1715,33 @@ class ForEachTask:
     def as_dict(self) -> dict:
         """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.task: body['task'] = self.task.as_dict()
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.task:
+            body["task"] = self.task.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.task: body['task'] = self.task
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.task:
+            body["task"] = self.task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTask:
         """Deserializes the ForEachTask from a dictionary."""
-        return cls(concurrency=d.get('concurrency', None),
-                   inputs=d.get('inputs', None),
-                   task=_from_dict(d, 'task', Task))
+        return cls(
+            concurrency=d.get("concurrency", None),
+            inputs=d.get("inputs", None),
+            task=_from_dict(d, "task", Task),
+        )
 
 
 @dataclass
@@ -1493,25 +1758,33 @@ class ForEachTaskErrorMessageStats:
     def as_dict(self) -> dict:
         """Serializes the ForEachTaskErrorMessageStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.termination_category is not None: body['termination_category'] = self.termination_category
+        if self.count is not None:
+            body["count"] = self.count
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.termination_category is not None:
+            body["termination_category"] = self.termination_category
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachTaskErrorMessageStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.termination_category is not None: body['termination_category'] = self.termination_category
+        if self.count is not None:
+            body["count"] = self.count
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.termination_category is not None:
+            body["termination_category"] = self.termination_category
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
         """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
-        return cls(count=d.get('count', None),
-                   error_message=d.get('error_message', None),
-                   termination_category=d.get('termination_category', None))
+        return cls(
+            count=d.get("count", None),
+            error_message=d.get("error_message", None),
+            termination_category=d.get("termination_category", None),
+        )
 
 
 @dataclass
@@ -1537,40 +1810,54 @@ class ForEachTaskTaskRunStats:
     def as_dict(self) -> dict:
         """Serializes the ForEachTaskTaskRunStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.active_iterations is not None: body['active_iterations'] = self.active_iterations
-        if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations
-        if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations
-        if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations
-        if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations
-        if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
+        if self.active_iterations is not None:
+            body["active_iterations"] = self.active_iterations
+        if self.completed_iterations is not None:
+            body["completed_iterations"] = self.completed_iterations
+        if self.failed_iterations is not None:
+            body["failed_iterations"] = self.failed_iterations
+        if self.scheduled_iterations is not None:
+            body["scheduled_iterations"] = self.scheduled_iterations
+        if self.succeeded_iterations is not None:
+            body["succeeded_iterations"] = self.succeeded_iterations
+        if self.total_iterations is not None:
+            body["total_iterations"] = self.total_iterations
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.active_iterations is not None: body['active_iterations'] = self.active_iterations
-        if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations
-        if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations
-        if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations
-        if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations
-        if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
+        if self.active_iterations is not None:
+            body["active_iterations"] = self.active_iterations
+        if self.completed_iterations is not None:
+            body["completed_iterations"] = self.completed_iterations
+        if self.failed_iterations is not None:
+            body["failed_iterations"] = self.failed_iterations
+        if self.scheduled_iterations is not None:
+            body["scheduled_iterations"] = self.scheduled_iterations
+        if self.succeeded_iterations is not None:
+            body["succeeded_iterations"] = self.succeeded_iterations
+        if self.total_iterations is not None:
+            body["total_iterations"] = self.total_iterations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskTaskRunStats:
         """Deserializes the ForEachTaskTaskRunStats from a dictionary."""
-        return cls(active_iterations=d.get('active_iterations', None),
-                   completed_iterations=d.get('completed_iterations', None),
-                   failed_iterations=d.get('failed_iterations', None),
-                   scheduled_iterations=d.get('scheduled_iterations', None),
-                   succeeded_iterations=d.get('succeeded_iterations', None),
-                   total_iterations=d.get('total_iterations', None))
+        return cls(
+            active_iterations=d.get("active_iterations", None),
+            completed_iterations=d.get("completed_iterations", None),
+            failed_iterations=d.get("failed_iterations", None),
+            scheduled_iterations=d.get("scheduled_iterations", None),
+            succeeded_iterations=d.get("succeeded_iterations", None),
+            total_iterations=d.get("total_iterations", None),
+        )
 
 
 class Format(Enum):
 
-    MULTI_TASK = 'MULTI_TASK'
-    SINGLE_TASK = 'SINGLE_TASK'
+    MULTI_TASK = "MULTI_TASK"
+    SINGLE_TASK = "SINGLE_TASK"
 
 
 @dataclass
@@ -1581,19 +1868,21 @@ class GetJobPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetJobPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse:
         """Deserializes the GetJobPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", JobPermissionsDescription))
 
 
 @dataclass
@@ -1612,33 +1901,40 @@ class GetPolicyComplianceResponse:
     def as_dict(self) -> dict:
         """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse:
         """Deserializes the GetPolicyComplianceResponse from a dictionary."""
-        return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
+        return cls(
+            is_compliant=d.get("is_compliant", None),
+            violations=d.get("violations", None),
+        )
 
 
 class GitProvider(Enum):
 
-    AWS_CODE_COMMIT = 'awsCodeCommit'
-    AZURE_DEV_OPS_SERVICES = 'azureDevOpsServices'
-    BITBUCKET_CLOUD = 'bitbucketCloud'
-    BITBUCKET_SERVER = 'bitbucketServer'
-    GIT_HUB = 'gitHub'
-    GIT_HUB_ENTERPRISE = 'gitHubEnterprise'
-    GIT_LAB = 'gitLab'
-    GIT_LAB_ENTERPRISE_EDITION = 'gitLabEnterpriseEdition'
+    AWS_CODE_COMMIT = "awsCodeCommit"
+    AZURE_DEV_OPS_SERVICES = "azureDevOpsServices"
+    BITBUCKET_CLOUD = "bitbucketCloud"
+    BITBUCKET_SERVER = "bitbucketServer"
+    GIT_HUB = "gitHub"
+    GIT_HUB_ENTERPRISE = "gitHubEnterprise"
+    GIT_LAB = "gitLab"
+    GIT_LAB_ENTERPRISE_EDITION = "gitLabEnterpriseEdition"
 
 
 @dataclass
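
A sketch of the GitSource dataclass reformatted further below, using the GIT_HUB member of the GitProvider enum above; the repository URL is a placeholder:

    from databricks.sdk.service import jobs

    src = jobs.GitSource(
        git_url="https://github.com/org/repo",  # placeholder
        git_provider=jobs.GitProvider.GIT_HUB,
        git_branch="main",
    )
    print(src.as_dict())  # git_provider is written as its wire value, "gitHub"
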
@@ -1654,29 +1950,31 @@ class GitSnapshot:
     def as_dict(self) -> dict:
         """Serializes the GitSnapshot into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.used_commit is not None: body['used_commit'] = self.used_commit
+        if self.used_commit is not None:
+            body["used_commit"] = self.used_commit
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.used_commit is not None: body['used_commit'] = self.used_commit
+        if self.used_commit is not None:
+            body["used_commit"] = self.used_commit
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSnapshot:
         """Deserializes the GitSnapshot from a dictionary."""
-        return cls(used_commit=d.get('used_commit', None))
+        return cls(used_commit=d.get("used_commit", None))
 
 
 @dataclass
 class GitSource:
     """An optional specification for a remote Git repository containing the source code used by tasks.
     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-    
+
     If `git_source` is set, these tasks retrieve the file from the remote repository by default.
     However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-    
+
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
@@ -1708,37 +2006,53 @@ class GitSource:
     def as_dict(self) -> dict:
         """Serializes the GitSource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.git_branch is not None: body['git_branch'] = self.git_branch
-        if self.git_commit is not None: body['git_commit'] = self.git_commit
-        if self.git_provider is not None: body['git_provider'] = self.git_provider.value
-        if self.git_snapshot: body['git_snapshot'] = self.git_snapshot.as_dict()
-        if self.git_tag is not None: body['git_tag'] = self.git_tag
-        if self.git_url is not None: body['git_url'] = self.git_url
-        if self.job_source: body['job_source'] = self.job_source.as_dict()
+        if self.git_branch is not None:
+            body["git_branch"] = self.git_branch
+        if self.git_commit is not None:
+            body["git_commit"] = self.git_commit
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider.value
+        if self.git_snapshot:
+            body["git_snapshot"] = self.git_snapshot.as_dict()
+        if self.git_tag is not None:
+            body["git_tag"] = self.git_tag
+        if self.git_url is not None:
+            body["git_url"] = self.git_url
+        if self.job_source:
+            body["job_source"] = self.job_source.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GitSource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.git_branch is not None: body['git_branch'] = self.git_branch
-        if self.git_commit is not None: body['git_commit'] = self.git_commit
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_snapshot: body['git_snapshot'] = self.git_snapshot
-        if self.git_tag is not None: body['git_tag'] = self.git_tag
-        if self.git_url is not None: body['git_url'] = self.git_url
-        if self.job_source: body['job_source'] = self.job_source
+        if self.git_branch is not None:
+            body["git_branch"] = self.git_branch
+        if self.git_commit is not None:
+            body["git_commit"] = self.git_commit
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_snapshot:
+            body["git_snapshot"] = self.git_snapshot
+        if self.git_tag is not None:
+            body["git_tag"] = self.git_tag
+        if self.git_url is not None:
+            body["git_url"] = self.git_url
+        if self.job_source:
+            body["job_source"] = self.job_source
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSource:
         """Deserializes the GitSource from a dictionary."""
-        return cls(git_branch=d.get('git_branch', None),
-                   git_commit=d.get('git_commit', None),
-                   git_provider=_enum(d, 'git_provider', GitProvider),
-                   git_snapshot=_from_dict(d, 'git_snapshot', GitSnapshot),
-                   git_tag=d.get('git_tag', None),
-                   git_url=d.get('git_url', None),
-                   job_source=_from_dict(d, 'job_source', JobSource))
+        return cls(
+            git_branch=d.get("git_branch", None),
+            git_commit=d.get("git_commit", None),
+            git_provider=_enum(d, "git_provider", GitProvider),
+            git_snapshot=_from_dict(d, "git_snapshot", GitSnapshot),
+            git_tag=d.get("git_tag", None),
+            git_url=d.get("git_url", None),
+            job_source=_from_dict(d, "job_source", JobSource),
+        )
 
 
 @dataclass
@@ -1785,42 +2099,58 @@ class Job:
     def as_dict(self) -> dict:
         """Serializes the Job into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_time is not None: body['created_time'] = self.created_time
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.created_time is not None:
+            body["created_time"] = self.created_time
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.settings: body['settings'] = self.settings.as_dict()
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Job into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_time is not None: body['created_time'] = self.created_time
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.created_time is not None:
+            body["created_time"] = self.created_time
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.settings: body['settings'] = self.settings
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.settings:
+            body["settings"] = self.settings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Job:
         """Deserializes the Job from a dictionary."""
-        return cls(created_time=d.get('created_time', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
-                   has_more=d.get('has_more', None),
-                   job_id=d.get('job_id', None),
-                   next_page_token=d.get('next_page_token', None),
-                   run_as_user_name=d.get('run_as_user_name', None),
-                   settings=_from_dict(d, 'settings', JobSettings))
+        return cls(
+            created_time=d.get("created_time", None),
+            creator_user_name=d.get("creator_user_name", None),
+            effective_budget_policy_id=d.get("effective_budget_policy_id", None),
+            has_more=d.get("has_more", None),
+            job_id=d.get("job_id", None),
+            next_page_token=d.get("next_page_token", None),
+            run_as_user_name=d.get("run_as_user_name", None),
+            settings=_from_dict(d, "settings", JobSettings),
+        )
 
 
 @dataclass
@@ -1840,30 +2170,38 @@ class JobAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the JobAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlRequest:
         """Deserializes the JobAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', JobPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", JobPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
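
To make the as_dict versus as_shallow_dict distinction concrete, a small sketch (the user name below is illustrative): the deep form flattens the permission enum to its string value, the shallow form keeps the enum member as-is.

    from databricks.sdk.service.jobs import JobAccessControlRequest, JobPermissionLevel

    acl = JobAccessControlRequest(
        user_name="jane.doe@example.com",                    # illustrative user
        permission_level=JobPermissionLevel.CAN_MANAGE_RUN,
    )
    acl.as_dict()          # {"user_name": ..., "permission_level": "CAN_MANAGE_RUN"} -- flattened via .value
    acl.as_shallow_dict()  # keeps JobPermissionLevel.CAN_MANAGE_RUN as the enum member itself
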
 
 
 @dataclass
@@ -1886,33 +2224,43 @@ class JobAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the JobAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlResponse:
         """Deserializes the JobAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', JobPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", JobPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1928,22 +2276,28 @@ class JobCluster:
     def as_dict(self) -> dict:
         """Serializes the JobCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCluster:
         """Deserializes the JobCluster from a dictionary."""
-        return cls(job_cluster_key=d.get('job_cluster_key', None),
-                   new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec))
+        return cls(
+            job_cluster_key=d.get("job_cluster_key", None),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+        )
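
A sketch of pairing a job_cluster_key with a compute.ClusterSpec, assuming the usual ClusterSpec fields (spark_version, node_type_id, num_workers) from the compute service; the runtime and node type strings are placeholders.

    from databricks.sdk.service import compute
    from databricks.sdk.service.jobs import JobCluster

    jc = JobCluster(
        job_cluster_key="main",
        new_cluster=compute.ClusterSpec(       # field names assumed from the compute service module
            spark_version="15.4.x-scala2.12",  # illustrative runtime version
            node_type_id="i3.xlarge",          # illustrative node type
            num_workers=2,
        ),
    )
    jc.as_dict()          # nests the spec as a plain dict via new_cluster.as_dict()
    jc.as_shallow_dict()  # keeps the ClusterSpec object itself under "new_cluster"
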
 
 
 @dataclass
@@ -1963,25 +2317,33 @@ class JobCompliance:
     def as_dict(self) -> dict:
         """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobCompliance into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.violations: body['violations'] = self.violations
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.violations:
+            body["violations"] = self.violations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCompliance:
         """Deserializes the JobCompliance from a dictionary."""
-        return cls(is_compliant=d.get('is_compliant', None),
-                   job_id=d.get('job_id', None),
-                   violations=d.get('violations', None))
+        return cls(
+            is_compliant=d.get("is_compliant", None),
+            job_id=d.get("job_id", None),
+            violations=d.get("violations", None),
+        )
 
 
 @dataclass
@@ -1997,38 +2359,44 @@ class JobDeployment:
     def as_dict(self) -> dict:
         """Serializes the JobDeployment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.metadata_file_path is not None:
+            body["metadata_file_path"] = self.metadata_file_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobDeployment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.kind is not None: body['kind'] = self.kind
-        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.metadata_file_path is not None:
+            body["metadata_file_path"] = self.metadata_file_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobDeployment:
         """Deserializes the JobDeployment from a dictionary."""
-        return cls(kind=_enum(d, 'kind', JobDeploymentKind),
-                   metadata_file_path=d.get('metadata_file_path', None))
+        return cls(
+            kind=_enum(d, "kind", JobDeploymentKind),
+            metadata_file_path=d.get("metadata_file_path", None),
+        )
 
 
 class JobDeploymentKind(Enum):
     """* `BUNDLE`: The job is managed by Databricks Asset Bundle."""
 
-    BUNDLE = 'BUNDLE'
+    BUNDLE = "BUNDLE"
 
 
 class JobEditMode(Enum):
     """Edit mode of the job.
-    
+
     * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is
     in an editable state and can be modified."""
 
-    EDITABLE = 'EDITABLE'
-    UI_LOCKED = 'UI_LOCKED'
+    EDITABLE = "EDITABLE"
+    UI_LOCKED = "UI_LOCKED"
 
 
 @dataclass
@@ -2070,42 +2438,47 @@ def as_dict(self) -> dict:
         """Serializes the JobEmailNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = [
-                v for v in self.on_duration_warning_threshold_exceeded
-            ]
-        if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
-        if self.on_start: body['on_start'] = [v for v in self.on_start]
+            body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded]
+        if self.on_failure:
+            body["on_failure"] = [v for v in self.on_failure]
+        if self.on_start:
+            body["on_start"] = [v for v in self.on_start]
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
-        if self.on_success: body['on_success'] = [v for v in self.on_success]
+            body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded]
+        if self.on_success:
+            body["on_success"] = [v for v in self.on_success]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
-        if self.on_failure: body['on_failure'] = self.on_failure
-        if self.on_start: body['on_start'] = self.on_start
+            body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure:
+            body["on_failure"] = self.on_failure
+        if self.on_start:
+            body["on_start"] = self.on_start
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
-        if self.on_success: body['on_success'] = self.on_success
+            body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded
+        if self.on_success:
+            body["on_success"] = self.on_success
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEmailNotifications:
         """Deserializes the JobEmailNotifications from a dictionary."""
-        return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None),
-                   on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded',
-                                                                None),
-                   on_failure=d.get('on_failure', None),
-                   on_start=d.get('on_start', None),
-                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
-                   on_success=d.get('on_success', None))
+        return cls(
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+            on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None),
+            on_failure=d.get("on_failure", None),
+            on_start=d.get("on_start", None),
+            on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None),
+            on_success=d.get("on_success", None),
+        )
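
A minimal sketch of deserializing notification settings from a plain dict; the keys mirror the serialization code above, and the address is illustrative.

    from databricks.sdk.service.jobs import JobEmailNotifications

    notif = JobEmailNotifications.from_dict(
        {"on_failure": ["ops@example.com"], "no_alert_for_skipped_runs": True}
    )
    notif.on_failure   # ["ops@example.com"]
    notif.as_dict()    # round-trips back to the same JSON-ready dict
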
 
 
 @dataclass
@@ -2120,22 +2493,28 @@ class JobEnvironment:
     def as_dict(self) -> dict:
         """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.spec: body['spec'] = self.spec.as_dict()
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.spec: body['spec'] = self.spec
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.spec:
+            body["spec"] = self.spec
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEnvironment:
         """Deserializes the JobEnvironment from a dictionary."""
-        return cls(environment_key=d.get('environment_key', None),
-                   spec=_from_dict(d, 'spec', compute.Environment))
+        return cls(
+            environment_key=d.get("environment_key", None),
+            spec=_from_dict(d, "spec", compute.Environment),
+        )
 
 
 @dataclass
@@ -2152,25 +2531,27 @@ def as_dict(self) -> dict:
         """Serializes the JobNotificationSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.no_alert_for_canceled_runs is not None:
-            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.no_alert_for_canceled_runs is not None:
-            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobNotificationSettings:
         """Deserializes the JobNotificationSettings from a dictionary."""
-        return cls(no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None),
-                   no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
+        return cls(
+            no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None),
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+        )
 
 
 @dataclass
@@ -2187,23 +2568,33 @@ class JobParameter:
     def as_dict(self) -> dict:
         """Serializes the JobParameter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default is not None: body['default'] = self.default
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
+        if self.default is not None:
+            body["default"] = self.default
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobParameter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default is not None: body['default'] = self.default
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
+        if self.default is not None:
+            body["default"] = self.default
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobParameter:
         """Deserializes the JobParameter from a dictionary."""
-        return cls(default=d.get('default', None), name=d.get('name', None), value=d.get('value', None))
+        return cls(
+            default=d.get("default", None),
+            name=d.get("name", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -2217,21 +2608,25 @@ class JobParameterDefinition:
     def as_dict(self) -> dict:
         """Serializes the JobParameterDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default is not None: body['default'] = self.default
-        if self.name is not None: body['name'] = self.name
+        if self.default is not None:
+            body["default"] = self.default
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default is not None: body['default'] = self.default
-        if self.name is not None: body['name'] = self.name
+        if self.default is not None:
+            body["default"] = self.default
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobParameterDefinition:
         """Deserializes the JobParameterDefinition from a dictionary."""
-        return cls(default=d.get('default', None), name=d.get('name', None))
+        return cls(default=d.get("default", None), name=d.get("name", None))
 
 
 @dataclass
@@ -2246,34 +2641,42 @@ class JobPermission:
     def as_dict(self) -> dict:
         """Serializes the JobPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermission:
         """Deserializes the JobPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', JobPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", JobPermissionLevel),
+        )
 
 
 class JobPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
-    CAN_VIEW = 'CAN_VIEW'
-    IS_OWNER = 'IS_OWNER'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MANAGE_RUN = "CAN_MANAGE_RUN"
+    CAN_VIEW = "CAN_VIEW"
+    IS_OWNER = "IS_OWNER"
 
 
 @dataclass
@@ -2288,25 +2691,32 @@ def as_dict(self) -> dict:
         """Serializes the JobPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissions:
         """Deserializes the JobPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -2319,22 +2729,28 @@ class JobPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the JobPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsDescription:
         """Deserializes the JobPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', JobPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", JobPermissionLevel),
+        )
 
 
 @dataclass
@@ -2348,30 +2764,36 @@ def as_dict(self) -> dict:
         """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.job_id is not None: body['job_id'] = self.job_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
         """Deserializes the JobPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
-                   job_id=d.get('job_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
+            job_id=d.get("job_id", None),
+        )
 
 
 @dataclass
 class JobRunAs:
     """Write-only setting. Specifies the user or service principal that the job runs as. If not
     specified, the job runs as the user who created the job.
-    
-    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
+
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
+    """
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -2385,23 +2807,27 @@ def as_dict(self) -> dict:
         """Serializes the JobRunAs into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobRunAs into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobRunAs:
         """Deserializes the JobRunAs from a dictionary."""
-        return cls(service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
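
Following the docstring above, a sketch that sets exactly one of the two identities; the application ID is a placeholder.

    from databricks.sdk.service.jobs import JobRunAs

    # Either user_name or service_principal_name should be set, not both.
    run_as = JobRunAs(service_principal_name="<application-id-of-service-principal>")
    run_as.as_dict()   # {"service_principal_name": "<application-id-of-service-principal>"}
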
 
 
 @dataclass
@@ -2522,88 +2948,138 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.continuous: body['continuous'] = self.continuous.as_dict()
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.description is not None: body['description'] = self.description
-        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
-        if self.format is not None: body['format'] = self.format.value
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.health: body['health'] = self.health.as_dict()
-        if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
-        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
-        if self.name is not None: body['name'] = self.name
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
-        if self.queue: body['queue'] = self.queue.as_dict()
-        if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.tags: body['tags'] = self.tags
-        if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.continuous:
+            body["continuous"] = self.continuous.as_dict()
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.description is not None:
+            body["description"] = self.description
+        if self.edit_mode is not None:
+            body["edit_mode"] = self.edit_mode.value
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.environments:
+            body["environments"] = [v.as_dict() for v in self.environments]
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.job_clusters:
+            body["job_clusters"] = [v.as_dict() for v in self.job_clusters]
+        if self.max_concurrent_runs is not None:
+            body["max_concurrent_runs"] = self.max_concurrent_runs
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target.value
+        if self.queue:
+            body["queue"] = self.queue.as_dict()
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.tags:
+            body["tags"] = self.tags
+        if self.tasks:
+            body["tasks"] = [v.as_dict() for v in self.tasks]
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.continuous: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.description is not None: body['description'] = self.description
-        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.environments: body['environments'] = self.environments
-        if self.format is not None: body['format'] = self.format
-        if self.git_source: body['git_source'] = self.git_source
-        if self.health: body['health'] = self.health
-        if self.job_clusters: body['job_clusters'] = self.job_clusters
-        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
-        if self.name is not None: body['name'] = self.name
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.parameters: body['parameters'] = self.parameters
-        if self.performance_target is not None: body['performance_target'] = self.performance_target
-        if self.queue: body['queue'] = self.queue
-        if self.run_as: body['run_as'] = self.run_as
-        if self.schedule: body['schedule'] = self.schedule
-        if self.tags: body['tags'] = self.tags
-        if self.tasks: body['tasks'] = self.tasks
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.trigger: body['trigger'] = self.trigger
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.continuous:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.description is not None:
+            body["description"] = self.description
+        if self.edit_mode is not None:
+            body["edit_mode"] = self.edit_mode
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.environments:
+            body["environments"] = self.environments
+        if self.format is not None:
+            body["format"] = self.format
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.health:
+            body["health"] = self.health
+        if self.job_clusters:
+            body["job_clusters"] = self.job_clusters
+        if self.max_concurrent_runs is not None:
+            body["max_concurrent_runs"] = self.max_concurrent_runs
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target
+        if self.queue:
+            body["queue"] = self.queue
+        if self.run_as:
+            body["run_as"] = self.run_as
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.tags:
+            body["tags"] = self.tags
+        if self.tasks:
+            body["tasks"] = self.tasks
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.trigger:
+            body["trigger"] = self.trigger
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(budget_policy_id=d.get('budget_policy_id', None),
-                   continuous=_from_dict(d, 'continuous', Continuous),
-                   deployment=_from_dict(d, 'deployment', JobDeployment),
-                   description=d.get('description', None),
-                   edit_mode=_enum(d, 'edit_mode', JobEditMode),
-                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
-                   environments=_repeated_dict(d, 'environments', JobEnvironment),
-                   format=_enum(d, 'format', Format),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   health=_from_dict(d, 'health', JobsHealthRules),
-                   job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
-                   max_concurrent_runs=d.get('max_concurrent_runs', None),
-                   name=d.get('name', None),
-                   notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
-                   parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
-                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
-                   queue=_from_dict(d, 'queue', QueueSettings),
-                   run_as=_from_dict(d, 'run_as', JobRunAs),
-                   schedule=_from_dict(d, 'schedule', CronSchedule),
-                   tags=d.get('tags', None),
-                   tasks=_repeated_dict(d, 'tasks', Task),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   trigger=_from_dict(d, 'trigger', TriggerSettings),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            budget_policy_id=d.get("budget_policy_id", None),
+            continuous=_from_dict(d, "continuous", Continuous),
+            deployment=_from_dict(d, "deployment", JobDeployment),
+            description=d.get("description", None),
+            edit_mode=_enum(d, "edit_mode", JobEditMode),
+            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
+            environments=_repeated_dict(d, "environments", JobEnvironment),
+            format=_enum(d, "format", Format),
+            git_source=_from_dict(d, "git_source", GitSource),
+            health=_from_dict(d, "health", JobsHealthRules),
+            job_clusters=_repeated_dict(d, "job_clusters", JobCluster),
+            max_concurrent_runs=d.get("max_concurrent_runs", None),
+            name=d.get("name", None),
+            notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings),
+            parameters=_repeated_dict(d, "parameters", JobParameterDefinition),
+            performance_target=_enum(d, "performance_target", PerformanceTarget),
+            queue=_from_dict(d, "queue", QueueSettings),
+            run_as=_from_dict(d, "run_as", JobRunAs),
+            schedule=_from_dict(d, "schedule", CronSchedule),
+            tags=d.get("tags", None),
+            tasks=_repeated_dict(d, "tasks", Task),
+            timeout_seconds=d.get("timeout_seconds", None),
+            trigger=_from_dict(d, "trigger", TriggerSettings),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
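
To illustrate the nested deserialization above, a small sketch: scalar keys come through d.get, while email_notifications is rebuilt as a JobEmailNotifications instance via _from_dict (job name and address are illustrative).

    from databricks.sdk.service.jobs import JobSettings

    d = {
        "name": "nightly-etl",                                        # illustrative job name
        "max_concurrent_runs": 1,
        "email_notifications": {"on_failure": ["ops@example.com"]},
    }
    settings = JobSettings.from_dict(d)
    settings.email_notifications.on_failure   # ["ops@example.com"] -- a nested dataclass, not a raw dict
    settings.as_dict() == d                    # True: as_dict flattens nested objects back to JSON-ready dicts
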
 
 
 @dataclass
@@ -2629,46 +3105,52 @@ class JobSource:
     def as_dict(self) -> dict:
         """Serializes the JobSource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dirty_state is not None: body['dirty_state'] = self.dirty_state.value
+        if self.dirty_state is not None:
+            body["dirty_state"] = self.dirty_state.value
         if self.import_from_git_branch is not None:
-            body['import_from_git_branch'] = self.import_from_git_branch
-        if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
+            body["import_from_git_branch"] = self.import_from_git_branch
+        if self.job_config_path is not None:
+            body["job_config_path"] = self.job_config_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobSource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dirty_state is not None: body['dirty_state'] = self.dirty_state
+        if self.dirty_state is not None:
+            body["dirty_state"] = self.dirty_state
         if self.import_from_git_branch is not None:
-            body['import_from_git_branch'] = self.import_from_git_branch
-        if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
+            body["import_from_git_branch"] = self.import_from_git_branch
+        if self.job_config_path is not None:
+            body["job_config_path"] = self.job_config_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSource:
         """Deserializes the JobSource from a dictionary."""
-        return cls(dirty_state=_enum(d, 'dirty_state', JobSourceDirtyState),
-                   import_from_git_branch=d.get('import_from_git_branch', None),
-                   job_config_path=d.get('job_config_path', None))
+        return cls(
+            dirty_state=_enum(d, "dirty_state", JobSourceDirtyState),
+            import_from_git_branch=d.get("import_from_git_branch", None),
+            job_config_path=d.get("job_config_path", None),
+        )
 
 
 class JobSourceDirtyState(Enum):
     """Dirty state indicates the job is not fully synced with the job specification in the remote
     repository.
-    
+
     Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job
     specification. Import the remote job specification from the UI to make the job fully synced. *
     `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is
     allowed for live edit. Import the remote job specification again from the UI to make the job fully
     synced."""
 
-    DISCONNECTED = 'DISCONNECTED'
-    NOT_SYNCED = 'NOT_SYNCED'
+    DISCONNECTED = "DISCONNECTED"
+    NOT_SYNCED = "NOT_SYNCED"
 
 
 class JobsHealthMetric(Enum):
     """Specifies the health metric that is being evaluated for a particular health rule.
-    
+
     * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
     An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
     is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
@@ -2677,17 +3159,17 @@ class JobsHealthMetric(Enum):
     `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
     streams. This metric is in Public Preview."""
 
-    RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
-    STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
-    STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES'
-    STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS'
-    STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS'
+    RUN_DURATION_SECONDS = "RUN_DURATION_SECONDS"
+    STREAMING_BACKLOG_BYTES = "STREAMING_BACKLOG_BYTES"
+    STREAMING_BACKLOG_FILES = "STREAMING_BACKLOG_FILES"
+    STREAMING_BACKLOG_RECORDS = "STREAMING_BACKLOG_RECORDS"
+    STREAMING_BACKLOG_SECONDS = "STREAMING_BACKLOG_SECONDS"
 
 
 class JobsHealthOperator(Enum):
     """Specifies the operator used to compare the health metric value with the specified threshold."""
 
-    GREATER_THAN = 'GREATER_THAN'
+    GREATER_THAN = "GREATER_THAN"
 
 
 @dataclass
@@ -2712,25 +3194,33 @@ class JobsHealthRule:
     def as_dict(self) -> dict:
         """Serializes the JobsHealthRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metric is not None: body['metric'] = self.metric.value
-        if self.op is not None: body['op'] = self.op.value
-        if self.value is not None: body['value'] = self.value
+        if self.metric is not None:
+            body["metric"] = self.metric.value
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metric is not None: body['metric'] = self.metric
-        if self.op is not None: body['op'] = self.op
-        if self.value is not None: body['value'] = self.value
+        if self.metric is not None:
+            body["metric"] = self.metric
+        if self.op is not None:
+            body["op"] = self.op
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRule:
         """Deserializes the JobsHealthRule from a dictionary."""
-        return cls(metric=_enum(d, 'metric', JobsHealthMetric),
-                   op=_enum(d, 'op', JobsHealthOperator),
-                   value=d.get('value', None))
+        return cls(
+            metric=_enum(d, "metric", JobsHealthMetric),
+            op=_enum(d, "op", JobsHealthOperator),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -2742,19 +3232,21 @@ class JobsHealthRules:
     def as_dict(self) -> dict:
         """Serializes the JobsHealthRules into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.rules: body['rules'] = [v.as_dict() for v in self.rules]
+        if self.rules:
+            body["rules"] = [v.as_dict() for v in self.rules]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.rules: body['rules'] = self.rules
+        if self.rules:
+            body["rules"] = self.rules
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules:
         """Deserializes the JobsHealthRules from a dictionary."""
-        return cls(rules=_repeated_dict(d, 'rules', JobsHealthRule))
+        return cls(rules=_repeated_dict(d, "rules", JobsHealthRule))
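
A sketch that combines the enums above into a single duration rule; the 3600-second threshold is illustrative.

    from databricks.sdk.service.jobs import (
        JobsHealthMetric,
        JobsHealthOperator,
        JobsHealthRule,
        JobsHealthRules,
    )

    health = JobsHealthRules(
        rules=[
            JobsHealthRule(
                metric=JobsHealthMetric.RUN_DURATION_SECONDS,
                op=JobsHealthOperator.GREATER_THAN,
                value=3600,  # flag runs longer than an hour (illustrative threshold)
            )
        ]
    )
    health.as_dict()   # {"rules": [{"metric": "RUN_DURATION_SECONDS", "op": "GREATER_THAN", "value": 3600}]}
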
 
 
 @dataclass
@@ -2773,25 +3265,33 @@ class ListJobComplianceForPolicyResponse:
     def as_dict(self) -> dict:
         """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.jobs:
+            body["jobs"] = [v.as_dict() for v in self.jobs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.jobs: body['jobs'] = self.jobs
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.jobs:
+            body["jobs"] = self.jobs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse:
         """Deserializes the ListJobComplianceForPolicyResponse from a dictionary."""
-        return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None))
+        return cls(
+            jobs=_repeated_dict(d, "jobs", JobCompliance),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+        )
 
 
 @dataclass
@@ -2813,28 +3313,38 @@ class ListJobsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListJobsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.jobs:
+            body["jobs"] = [v.as_dict() for v in self.jobs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.jobs: body['jobs'] = self.jobs
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.jobs:
+            body["jobs"] = self.jobs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobsResponse:
         """Deserializes the ListJobsResponse from a dictionary."""
-        return cls(has_more=d.get('has_more', None),
-                   jobs=_repeated_dict(d, 'jobs', BaseJob),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None))
+        return cls(
+            has_more=d.get("has_more", None),
+            jobs=_repeated_dict(d, "jobs", BaseJob),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+        )
 
 
 @dataclass
@@ -2857,28 +3367,38 @@ class ListRunsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListRunsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
-        if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
+        if self.runs:
+            body["runs"] = [v.as_dict() for v in self.runs]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
-        if self.runs: body['runs'] = self.runs
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
+        if self.runs:
+            body["runs"] = self.runs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRunsResponse:
         """Deserializes the ListRunsResponse from a dictionary."""
-        return cls(has_more=d.get('has_more', None),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None),
-                   runs=_repeated_dict(d, 'runs', BaseRun))
+        return cls(
+            has_more=d.get("has_more", None),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+            runs=_repeated_dict(d, "runs", BaseRun),
+        )
 
 
 @dataclass
@@ -2896,21 +3416,25 @@ class NotebookOutput:
     def as_dict(self) -> dict:
         """Serializes the NotebookOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.result is not None: body['result'] = self.result
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.result is not None:
+            body["result"] = self.result
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.result is not None: body['result'] = self.result
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.result is not None:
+            body["result"] = self.result
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookOutput:
         """Deserializes the NotebookOutput from a dictionary."""
-        return cls(result=d.get('result', None), truncated=d.get('truncated', None))
+        return cls(result=d.get("result", None), truncated=d.get("truncated", None))
 
 
 @dataclass
@@ -2953,28 +3477,38 @@ class NotebookTask:
     def as_dict(self) -> dict:
         """Serializes the NotebookTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.base_parameters: body['base_parameters'] = self.base_parameters
-        if self.notebook_path is not None: body['notebook_path'] = self.notebook_path
-        if self.source is not None: body['source'] = self.source.value
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.base_parameters:
+            body["base_parameters"] = self.base_parameters
+        if self.notebook_path is not None:
+            body["notebook_path"] = self.notebook_path
+        if self.source is not None:
+            body["source"] = self.source.value
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.base_parameters: body['base_parameters'] = self.base_parameters
-        if self.notebook_path is not None: body['notebook_path'] = self.notebook_path
-        if self.source is not None: body['source'] = self.source
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.base_parameters:
+            body["base_parameters"] = self.base_parameters
+        if self.notebook_path is not None:
+            body["notebook_path"] = self.notebook_path
+        if self.source is not None:
+            body["source"] = self.source
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookTask:
         """Deserializes the NotebookTask from a dictionary."""
-        return cls(base_parameters=d.get('base_parameters', None),
-                   notebook_path=d.get('notebook_path', None),
-                   source=_enum(d, 'source', Source),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            base_parameters=d.get("base_parameters", None),
+            notebook_path=d.get("notebook_path", None),
+            source=_enum(d, "source", Source),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -2992,40 +3526,49 @@ class OutputSchemaInfo:
     def as_dict(self) -> dict:
         """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo:
         """Deserializes the OutputSchemaInfo from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   expiration_time=d.get('expiration_time', None),
-                   schema_name=d.get('schema_name', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            expiration_time=d.get("expiration_time", None),
+            schema_name=d.get("schema_name", None),
+        )
 
 
 class PauseStatus(Enum):
 
-    PAUSED = 'PAUSED'
-    UNPAUSED = 'UNPAUSED'
+    PAUSED = "PAUSED"
+    UNPAUSED = "UNPAUSED"
 
 
 class PerformanceTarget(Enum):
     """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run
     on serverless compute should be. The performance mode on the job or pipeline should map to a
-    performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget)."""
+    performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).
+    """
 
-    COST_OPTIMIZED = 'COST_OPTIMIZED'
-    PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED'
+    COST_OPTIMIZED = "COST_OPTIMIZED"
+    PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED"
 
 
 @dataclass
@@ -3039,29 +3582,35 @@ class PeriodicTriggerConfiguration:
     def as_dict(self) -> dict:
         """Serializes the PeriodicTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.interval is not None: body['interval'] = self.interval
-        if self.unit is not None: body['unit'] = self.unit.value
+        if self.interval is not None:
+            body["interval"] = self.interval
+        if self.unit is not None:
+            body["unit"] = self.unit.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.interval is not None: body['interval'] = self.interval
-        if self.unit is not None: body['unit'] = self.unit
+        if self.interval is not None:
+            body["interval"] = self.interval
+        if self.unit is not None:
+            body["unit"] = self.unit
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration:
         """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
-        return cls(interval=d.get('interval', None),
-                   unit=_enum(d, 'unit', PeriodicTriggerConfigurationTimeUnit))
+        return cls(
+            interval=d.get("interval", None),
+            unit=_enum(d, "unit", PeriodicTriggerConfigurationTimeUnit),
+        )
 
 
 class PeriodicTriggerConfigurationTimeUnit(Enum):
 
-    DAYS = 'DAYS'
-    HOURS = 'HOURS'
-    WEEKS = 'WEEKS'
+    DAYS = "DAYS"
+    HOURS = "HOURS"
+    WEEKS = "WEEKS"
 
 
 @dataclass
@@ -3072,19 +3621,21 @@ class PipelineParams:
     def as_dict(self) -> dict:
         """Serializes the PipelineParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineParams:
         """Deserializes the PipelineParams from a dictionary."""
-        return cls(full_refresh=d.get('full_refresh', None))
+        return cls(full_refresh=d.get("full_refresh", None))
 
 
 @dataclass
@@ -3098,21 +3649,28 @@ class PipelineTask:
     def as_dict(self) -> dict:
         """Serializes the PipelineTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTask:
         """Deserializes the PipelineTask from a dictionary."""
-        return cls(full_refresh=d.get('full_refresh', None), pipeline_id=d.get('pipeline_id', None))
+        return cls(
+            full_refresh=d.get("full_refresh", None),
+            pipeline_id=d.get("pipeline_id", None),
+        )
 
 
 @dataclass
@@ -3135,28 +3693,38 @@ class PythonWheelTask:
     def as_dict(self) -> dict:
         """Serializes the PythonWheelTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.entry_point is not None: body['entry_point'] = self.entry_point
-        if self.named_parameters: body['named_parameters'] = self.named_parameters
-        if self.package_name is not None: body['package_name'] = self.package_name
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.entry_point is not None:
+            body["entry_point"] = self.entry_point
+        if self.named_parameters:
+            body["named_parameters"] = self.named_parameters
+        if self.package_name is not None:
+            body["package_name"] = self.package_name
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.entry_point is not None: body['entry_point'] = self.entry_point
-        if self.named_parameters: body['named_parameters'] = self.named_parameters
-        if self.package_name is not None: body['package_name'] = self.package_name
-        if self.parameters: body['parameters'] = self.parameters
+        if self.entry_point is not None:
+            body["entry_point"] = self.entry_point
+        if self.named_parameters:
+            body["named_parameters"] = self.named_parameters
+        if self.package_name is not None:
+            body["package_name"] = self.package_name
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask:
         """Deserializes the PythonWheelTask from a dictionary."""
-        return cls(entry_point=d.get('entry_point', None),
-                   named_parameters=d.get('named_parameters', None),
-                   package_name=d.get('package_name', None),
-                   parameters=d.get('parameters', None))
+        return cls(
+            entry_point=d.get("entry_point", None),
+            named_parameters=d.get("named_parameters", None),
+            package_name=d.get("package_name", None),
+            parameters=d.get("parameters", None),
+        )
 
 
 @dataclass
@@ -3175,21 +3743,28 @@ class QueueDetails:
     def as_dict(self) -> dict:
         """Serializes the QueueDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.code is not None: body['code'] = self.code.value
-        if self.message is not None: body['message'] = self.message
+        if self.code is not None:
+            body["code"] = self.code.value
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueueDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.code is not None: body['code'] = self.code
-        if self.message is not None: body['message'] = self.message
+        if self.code is not None:
+            body["code"] = self.code
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueDetails:
         """Deserializes the QueueDetails from a dictionary."""
-        return cls(code=_enum(d, 'code', QueueDetailsCodeCode), message=d.get('message', None))
+        return cls(
+            code=_enum(d, "code", QueueDetailsCodeCode),
+            message=d.get("message", None),
+        )
 
 
 class QueueDetailsCodeCode(Enum):
@@ -3199,9 +3774,9 @@ class QueueDetailsCodeCode(Enum):
     `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
     active run job tasks."""
 
-    ACTIVE_RUNS_LIMIT_REACHED = 'ACTIVE_RUNS_LIMIT_REACHED'
-    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = 'ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED'
-    MAX_CONCURRENT_RUNS_REACHED = 'MAX_CONCURRENT_RUNS_REACHED'
+    ACTIVE_RUNS_LIMIT_REACHED = "ACTIVE_RUNS_LIMIT_REACHED"
+    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = "ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED"
+    MAX_CONCURRENT_RUNS_REACHED = "MAX_CONCURRENT_RUNS_REACHED"
 
 
 @dataclass
@@ -3212,19 +3787,21 @@ class QueueSettings:
     def as_dict(self) -> dict:
         """Serializes the QueueSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueueSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueSettings:
         """Deserializes the QueueSettings from a dictionary."""
-        return cls(enabled=d.get('enabled', None))
+        return cls(enabled=d.get("enabled", None))
 
 
 @dataclass
@@ -3253,44 +3830,60 @@ class RepairHistoryItem:
     def as_dict(self) -> dict:
         """Serializes the RepairHistoryItem into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.id is not None: body['id'] = self.id
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
-        if self.task_run_ids: body['task_run_ids'] = [v for v in self.task_run_ids]
-        if self.type is not None: body['type'] = self.type.value
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.task_run_ids:
+            body["task_run_ids"] = [v for v in self.task_run_ids]
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.id is not None: body['id'] = self.id
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state
-        if self.status: body['status'] = self.status
-        if self.task_run_ids: body['task_run_ids'] = self.task_run_ids
-        if self.type is not None: body['type'] = self.type
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state
+        if self.status:
+            body["status"] = self.status
+        if self.task_run_ids:
+            body["task_run_ids"] = self.task_run_ids
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem:
         """Deserializes the RepairHistoryItem from a dictionary."""
-        return cls(end_time=d.get('end_time', None),
-                   id=d.get('id', None),
-                   start_time=d.get('start_time', None),
-                   state=_from_dict(d, 'state', RunState),
-                   status=_from_dict(d, 'status', RunStatus),
-                   task_run_ids=d.get('task_run_ids', None),
-                   type=_enum(d, 'type', RepairHistoryItemType))
+        return cls(
+            end_time=d.get("end_time", None),
+            id=d.get("id", None),
+            start_time=d.get("start_time", None),
+            state=_from_dict(d, "state", RunState),
+            status=_from_dict(d, "status", RunStatus),
+            task_run_ids=d.get("task_run_ids", None),
+            type=_enum(d, "type", RepairHistoryItemType),
+        )
 
 
 class RepairHistoryItemType(Enum):
     """The repair history item type. Indicates whether a run is the original run or a repair run."""
 
-    ORIGINAL = 'ORIGINAL'
-    REPAIR = 'REPAIR'
+    ORIGINAL = "ORIGINAL"
+    REPAIR = "REPAIR"
 
 
 @dataclass
@@ -3393,60 +3986,88 @@ class RepairRun:
     def as_dict(self) -> dict:
         """Serializes the RepairRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
-        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = [v for v in self.python_params]
+        if self.dbt_commands:
+            body["dbt_commands"] = [v for v in self.dbt_commands]
+        if self.jar_params:
+            body["jar_params"] = [v for v in self.jar_params]
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.latest_repair_id is not None:
+            body["latest_repair_id"] = self.latest_repair_id
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params.as_dict()
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = [v for v in self.python_params]
         if self.rerun_all_failed_tasks is not None:
-            body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
-        if self.rerun_tasks: body['rerun_tasks'] = [v for v in self.rerun_tasks]
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
-        if self.sql_params: body['sql_params'] = self.sql_params
+            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None:
+            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
+        if self.rerun_tasks:
+            body["rerun_tasks"] = [v for v in self.rerun_tasks]
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.spark_submit_params:
+            body["spark_submit_params"] = [v for v in self.spark_submit_params]
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepairRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
-        if self.jar_params: body['jar_params'] = self.jar_params
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = self.python_params
+        if self.dbt_commands:
+            body["dbt_commands"] = self.dbt_commands
+        if self.jar_params:
+            body["jar_params"] = self.jar_params
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.latest_repair_id is not None:
+            body["latest_repair_id"] = self.latest_repair_id
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = self.python_params
         if self.rerun_all_failed_tasks is not None:
-            body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
-        if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
-        if self.sql_params: body['sql_params'] = self.sql_params
+            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None:
+            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
+        if self.rerun_tasks:
+            body["rerun_tasks"] = self.rerun_tasks
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.spark_submit_params:
+            body["spark_submit_params"] = self.spark_submit_params
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRun:
         """Deserializes the RepairRun from a dictionary."""
-        return cls(dbt_commands=d.get('dbt_commands', None),
-                   jar_params=d.get('jar_params', None),
-                   job_parameters=d.get('job_parameters', None),
-                   latest_repair_id=d.get('latest_repair_id', None),
-                   notebook_params=d.get('notebook_params', None),
-                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
-                   python_named_params=d.get('python_named_params', None),
-                   python_params=d.get('python_params', None),
-                   rerun_all_failed_tasks=d.get('rerun_all_failed_tasks', None),
-                   rerun_dependent_tasks=d.get('rerun_dependent_tasks', None),
-                   rerun_tasks=d.get('rerun_tasks', None),
-                   run_id=d.get('run_id', None),
-                   spark_submit_params=d.get('spark_submit_params', None),
-                   sql_params=d.get('sql_params', None))
+        return cls(
+            dbt_commands=d.get("dbt_commands", None),
+            jar_params=d.get("jar_params", None),
+            job_parameters=d.get("job_parameters", None),
+            latest_repair_id=d.get("latest_repair_id", None),
+            notebook_params=d.get("notebook_params", None),
+            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
+            python_named_params=d.get("python_named_params", None),
+            python_params=d.get("python_params", None),
+            rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None),
+            rerun_dependent_tasks=d.get("rerun_dependent_tasks", None),
+            rerun_tasks=d.get("rerun_tasks", None),
+            run_id=d.get("run_id", None),
+            spark_submit_params=d.get("spark_submit_params", None),
+            sql_params=d.get("sql_params", None),
+        )
 
 
 @dataclass
@@ -3460,19 +4081,21 @@ class RepairRunResponse:
     def as_dict(self) -> dict:
         """Serializes the RepairRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.repair_id is not None: body['repair_id'] = self.repair_id
+        if self.repair_id is not None:
+            body["repair_id"] = self.repair_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.repair_id is not None: body['repair_id'] = self.repair_id
+        if self.repair_id is not None:
+            body["repair_id"] = self.repair_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRunResponse:
         """Deserializes the RepairRunResponse from a dictionary."""
-        return cls(repair_id=d.get('repair_id', None))
+        return cls(repair_id=d.get("repair_id", None))
 
 
 @dataclass
@@ -3489,21 +4112,28 @@ class ResetJob:
     def as_dict(self) -> dict:
         """Serializes the ResetJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResetJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResetJob:
         """Deserializes the ResetJob from a dictionary."""
-        return cls(job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings))
+        return cls(
+            job_id=d.get("job_id", None),
+            new_settings=_from_dict(d, "new_settings", JobSettings),
+        )
 
 
 @dataclass
@@ -3534,21 +4164,25 @@ class ResolvedConditionTaskValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedConditionTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedConditionTaskValues:
         """Deserializes the ResolvedConditionTaskValues from a dictionary."""
-        return cls(left=d.get('left', None), right=d.get('right', None))
+        return cls(left=d.get("left", None), right=d.get("right", None))
 
 
 @dataclass
@@ -3558,19 +4192,21 @@ class ResolvedDbtTaskValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedDbtTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.commands: body['commands'] = [v for v in self.commands]
+        if self.commands:
+            body["commands"] = [v for v in self.commands]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.commands: body['commands'] = self.commands
+        if self.commands:
+            body["commands"] = self.commands
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedDbtTaskValues:
         """Deserializes the ResolvedDbtTaskValues from a dictionary."""
-        return cls(commands=d.get('commands', None))
+        return cls(commands=d.get("commands", None))
 
 
 @dataclass
@@ -3580,19 +4216,21 @@ class ResolvedNotebookTaskValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedNotebookTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        if self.base_parameters:
+            body["base_parameters"] = self.base_parameters
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        if self.base_parameters:
+            body["base_parameters"] = self.base_parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedNotebookTaskValues:
         """Deserializes the ResolvedNotebookTaskValues from a dictionary."""
-        return cls(base_parameters=d.get('base_parameters', None))
+        return cls(base_parameters=d.get("base_parameters", None))
 
 
 @dataclass
@@ -3602,19 +4240,21 @@ class ResolvedParamPairValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedParamPairValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedParamPairValues:
         """Deserializes the ResolvedParamPairValues from a dictionary."""
-        return cls(parameters=d.get('parameters', None))
+        return cls(parameters=d.get("parameters", None))
 
 
 @dataclass
@@ -3626,21 +4266,28 @@ class ResolvedPythonWheelTaskValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedPythonWheelTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.named_parameters: body['named_parameters'] = self.named_parameters
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.named_parameters:
+            body["named_parameters"] = self.named_parameters
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.named_parameters: body['named_parameters'] = self.named_parameters
-        if self.parameters: body['parameters'] = self.parameters
+        if self.named_parameters:
+            body["named_parameters"] = self.named_parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedPythonWheelTaskValues:
         """Deserializes the ResolvedPythonWheelTaskValues from a dictionary."""
-        return cls(named_parameters=d.get('named_parameters', None), parameters=d.get('parameters', None))
+        return cls(
+            named_parameters=d.get("named_parameters", None),
+            parameters=d.get("parameters", None),
+        )
 
 
 @dataclass
@@ -3652,21 +4299,28 @@ class ResolvedRunJobTaskValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedRunJobTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.parameters: body['parameters'] = self.parameters
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.parameters: body['parameters'] = self.parameters
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedRunJobTaskValues:
         """Deserializes the ResolvedRunJobTaskValues from a dictionary."""
-        return cls(job_parameters=d.get('job_parameters', None), parameters=d.get('parameters', None))
+        return cls(
+            job_parameters=d.get("job_parameters", None),
+            parameters=d.get("parameters", None),
+        )
 
 
 @dataclass
@@ -3676,19 +4330,21 @@ class ResolvedStringParamsValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedStringParamsValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedStringParamsValues:
         """Deserializes the ResolvedStringParamsValues from a dictionary."""
-        return cls(parameters=d.get('parameters', None))
+        return cls(parameters=d.get("parameters", None))
 
 
 @dataclass
@@ -3716,46 +4372,68 @@ class ResolvedValues:
     def as_dict(self) -> dict:
         """Serializes the ResolvedValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
-        if self.simulation_task: body['simulation_task'] = self.simulation_task.as_dict()
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
+        if self.condition_task:
+            body["condition_task"] = self.condition_task.as_dict()
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task.as_dict()
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task.as_dict()
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task.as_dict()
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task.as_dict()
+        if self.simulation_task:
+            body["simulation_task"] = self.simulation_task.as_dict()
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task.as_dict()
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task.as_dict()
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task.as_dict()
+        if self.sql_task:
+            body["sql_task"] = self.sql_task.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition_task: body['condition_task'] = self.condition_task
-        if self.dbt_task: body['dbt_task'] = self.dbt_task
-        if self.notebook_task: body['notebook_task'] = self.notebook_task
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
-        if self.run_job_task: body['run_job_task'] = self.run_job_task
-        if self.simulation_task: body['simulation_task'] = self.simulation_task
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
-        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.condition_task:
+            body["condition_task"] = self.condition_task
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task
+        if self.simulation_task:
+            body["simulation_task"] = self.simulation_task
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task
+        if self.sql_task:
+            body["sql_task"] = self.sql_task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedValues:
         """Deserializes the ResolvedValues from a dictionary."""
-        return cls(condition_task=_from_dict(d, 'condition_task', ResolvedConditionTaskValues),
-                   dbt_task=_from_dict(d, 'dbt_task', ResolvedDbtTaskValues),
-                   notebook_task=_from_dict(d, 'notebook_task', ResolvedNotebookTaskValues),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', ResolvedPythonWheelTaskValues),
-                   run_job_task=_from_dict(d, 'run_job_task', ResolvedRunJobTaskValues),
-                   simulation_task=_from_dict(d, 'simulation_task', ResolvedParamPairValues),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', ResolvedStringParamsValues),
-                   spark_python_task=_from_dict(d, 'spark_python_task', ResolvedStringParamsValues),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', ResolvedStringParamsValues),
-                   sql_task=_from_dict(d, 'sql_task', ResolvedParamPairValues))
+        return cls(
+            condition_task=_from_dict(d, "condition_task", ResolvedConditionTaskValues),
+            dbt_task=_from_dict(d, "dbt_task", ResolvedDbtTaskValues),
+            notebook_task=_from_dict(d, "notebook_task", ResolvedNotebookTaskValues),
+            python_wheel_task=_from_dict(d, "python_wheel_task", ResolvedPythonWheelTaskValues),
+            run_job_task=_from_dict(d, "run_job_task", ResolvedRunJobTaskValues),
+            simulation_task=_from_dict(d, "simulation_task", ResolvedParamPairValues),
+            spark_jar_task=_from_dict(d, "spark_jar_task", ResolvedStringParamsValues),
+            spark_python_task=_from_dict(d, "spark_python_task", ResolvedStringParamsValues),
+            spark_submit_task=_from_dict(d, "spark_submit_task", ResolvedStringParamsValues),
+            sql_task=_from_dict(d, "sql_task", ResolvedParamPairValues),
+        )
 
 
 @dataclass
@@ -3924,125 +4602,193 @@ class Run:
     def as_dict(self) -> dict:
         """Serializes the Run into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
-        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.description is not None: body['description'] = self.description
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance.as_dict()
+        if self.cluster_spec:
+            body["cluster_spec"] = self.cluster_spec.as_dict()
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.description is not None:
+            body["description"] = self.description
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target.value
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
-        if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+            body["effective_performance_target"] = self.effective_performance_target.value
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.iterations:
+            body["iterations"] = [v.as_dict() for v in self.iterations]
+        if self.job_clusters:
+            body["job_clusters"] = [v.as_dict() for v in self.job_clusters]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = [v.as_dict() for v in self.job_parameters]
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
         if self.original_attempt_run_id is not None:
-            body['original_attempt_run_id'] = self.original_attempt_run_id
-        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.run_type is not None: body['run_type'] = self.run_type.value
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
-        if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
-        if self.trigger is not None: body['trigger'] = self.trigger.value
-        if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
+            body["original_attempt_run_id"] = self.original_attempt_run_id
+        if self.overriding_parameters:
+            body["overriding_parameters"] = self.overriding_parameters.as_dict()
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.repair_history:
+            body["repair_history"] = [v.as_dict() for v in self.repair_history]
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.run_type is not None:
+            body["run_type"] = self.run_type.value
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.tasks:
+            body["tasks"] = [v.as_dict() for v in self.tasks]
+        if self.trigger is not None:
+            body["trigger"] = self.trigger.value
+        if self.trigger_info:
+            body["trigger_info"] = self.trigger_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Run into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
-        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.description is not None: body['description'] = self.description
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance
+        if self.cluster_spec:
+            body["cluster_spec"] = self.cluster_spec
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.description is not None:
+            body["description"] = self.description
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.git_source: body['git_source'] = self.git_source
-        if self.has_more is not None: body['has_more'] = self.has_more
-        if self.iterations: body['iterations'] = self.iterations
-        if self.job_clusters: body['job_clusters'] = self.job_clusters
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+            body["effective_performance_target"] = self.effective_performance_target
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.has_more is not None:
+            body["has_more"] = self.has_more
+        if self.iterations:
+            body["iterations"] = self.iterations
+        if self.job_clusters:
+            body["job_clusters"] = self.job_clusters
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
         if self.original_attempt_run_id is not None:
-            body['original_attempt_run_id'] = self.original_attempt_run_id
-        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.repair_history: body['repair_history'] = self.repair_history
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.run_type is not None: body['run_type'] = self.run_type
-        if self.schedule: body['schedule'] = self.schedule
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state
-        if self.status: body['status'] = self.status
-        if self.tasks: body['tasks'] = self.tasks
-        if self.trigger is not None: body['trigger'] = self.trigger
-        if self.trigger_info: body['trigger_info'] = self.trigger_info
+            body["original_attempt_run_id"] = self.original_attempt_run_id
+        if self.overriding_parameters:
+            body["overriding_parameters"] = self.overriding_parameters
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.repair_history:
+            body["repair_history"] = self.repair_history
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.run_type is not None:
+            body["run_type"] = self.run_type
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state
+        if self.status:
+            body["status"] = self.status
+        if self.tasks:
+            body["tasks"] = self.tasks
+        if self.trigger is not None:
+            body["trigger"] = self.trigger
+        if self.trigger_info:
+            body["trigger_info"] = self.trigger_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
-        return cls(attempt_number=d.get('attempt_number', None),
-                   cleanup_duration=d.get('cleanup_duration', None),
-                   cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
-                   cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
-                   creator_user_name=d.get('creator_user_name', None),
-                   description=d.get('description', None),
-                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
-                   end_time=d.get('end_time', None),
-                   execution_duration=d.get('execution_duration', None),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   has_more=d.get('has_more', None),
-                   iterations=_repeated_dict(d, 'iterations', RunTask),
-                   job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
-                   job_id=d.get('job_id', None),
-                   job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
-                   job_run_id=d.get('job_run_id', None),
-                   next_page_token=d.get('next_page_token', None),
-                   number_in_job=d.get('number_in_job', None),
-                   original_attempt_run_id=d.get('original_attempt_run_id', None),
-                   overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   queue_duration=d.get('queue_duration', None),
-                   repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
-                   run_duration=d.get('run_duration', None),
-                   run_id=d.get('run_id', None),
-                   run_name=d.get('run_name', None),
-                   run_page_url=d.get('run_page_url', None),
-                   run_type=_enum(d, 'run_type', RunType),
-                   schedule=_from_dict(d, 'schedule', CronSchedule),
-                   setup_duration=d.get('setup_duration', None),
-                   start_time=d.get('start_time', None),
-                   state=_from_dict(d, 'state', RunState),
-                   status=_from_dict(d, 'status', RunStatus),
-                   tasks=_repeated_dict(d, 'tasks', RunTask),
-                   trigger=_enum(d, 'trigger', TriggerType),
-                   trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
+        return cls(
+            attempt_number=d.get("attempt_number", None),
+            cleanup_duration=d.get("cleanup_duration", None),
+            cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance),
+            cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec),
+            creator_user_name=d.get("creator_user_name", None),
+            description=d.get("description", None),
+            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
+            end_time=d.get("end_time", None),
+            execution_duration=d.get("execution_duration", None),
+            git_source=_from_dict(d, "git_source", GitSource),
+            has_more=d.get("has_more", None),
+            iterations=_repeated_dict(d, "iterations", RunTask),
+            job_clusters=_repeated_dict(d, "job_clusters", JobCluster),
+            job_id=d.get("job_id", None),
+            job_parameters=_repeated_dict(d, "job_parameters", JobParameter),
+            job_run_id=d.get("job_run_id", None),
+            next_page_token=d.get("next_page_token", None),
+            number_in_job=d.get("number_in_job", None),
+            original_attempt_run_id=d.get("original_attempt_run_id", None),
+            overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters),
+            queue_duration=d.get("queue_duration", None),
+            repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem),
+            run_duration=d.get("run_duration", None),
+            run_id=d.get("run_id", None),
+            run_name=d.get("run_name", None),
+            run_page_url=d.get("run_page_url", None),
+            run_type=_enum(d, "run_type", RunType),
+            schedule=_from_dict(d, "schedule", CronSchedule),
+            setup_duration=d.get("setup_duration", None),
+            start_time=d.get("start_time", None),
+            state=_from_dict(d, "state", RunState),
+            status=_from_dict(d, "status", RunStatus),
+            tasks=_repeated_dict(d, "tasks", RunTask),
+            trigger=_enum(d, "trigger", TriggerType),
+            trigger_info=_from_dict(d, "trigger_info", TriggerInfo),
+        )
 
 
 @dataclass
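For reviewers, a minimal before/after sketch of the only kind of change in these hunks: Black expands single-line conditionals and normalizes string quotes, with no behavioral change (lines taken from the Run.as_dict hunk above):

    # before
    if self.run_id is not None: body['run_id'] = self.run_id
    # after
    if self.run_id is not None:
        body["run_id"] = self.run_id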
@@ -4073,28 +4819,38 @@ class RunConditionTask:
     def as_dict(self) -> dict:
         """Serializes the RunConditionTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.op is not None: body['op'] = self.op.value
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.left is not None: body['left'] = self.left
-        if self.op is not None: body['op'] = self.op
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.right is not None: body['right'] = self.right
+        if self.left is not None:
+            body["left"] = self.left
+        if self.op is not None:
+            body["op"] = self.op
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.right is not None:
+            body["right"] = self.right
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunConditionTask:
         """Deserializes the RunConditionTask from a dictionary."""
-        return cls(left=d.get('left', None),
-                   op=_enum(d, 'op', ConditionTaskOp),
-                   outcome=d.get('outcome', None),
-                   right=d.get('right', None))
+        return cls(
+            left=d.get("left", None),
+            op=_enum(d, "op", ConditionTaskOp),
+            outcome=d.get("outcome", None),
+            right=d.get("right", None),
+        )
 
 
 @dataclass
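A hedged usage sketch of the serialization contract the RunConditionTask hunk above preserves (illustrative only; assumes ConditionTaskOp exposes an EQUAL_TO member, as in the published SDK):

    from databricks.sdk.service.jobs import ConditionTaskOp, RunConditionTask

    task = RunConditionTask(left="{{job.parameters.x}}", op=ConditionTaskOp.EQUAL_TO, right="1")
    body = task.as_dict()                              # the enum is written as its string value: body["op"] == "EQUAL_TO"
    assert RunConditionTask.from_dict(body) == task    # dataclass equality survives the round trip; unset fields stay None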
@@ -4116,46 +4872,56 @@ class RunForEachTask:
     def as_dict(self) -> dict:
         """Serializes the RunForEachTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.stats: body['stats'] = self.stats.as_dict()
-        if self.task: body['task'] = self.task.as_dict()
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.stats:
+            body["stats"] = self.stats.as_dict()
+        if self.task:
+            body["task"] = self.task.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.stats: body['stats'] = self.stats
-        if self.task: body['task'] = self.task
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.stats:
+            body["stats"] = self.stats
+        if self.task:
+            body["task"] = self.task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunForEachTask:
         """Deserializes the RunForEachTask from a dictionary."""
-        return cls(concurrency=d.get('concurrency', None),
-                   inputs=d.get('inputs', None),
-                   stats=_from_dict(d, 'stats', ForEachStats),
-                   task=_from_dict(d, 'task', Task))
+        return cls(
+            concurrency=d.get("concurrency", None),
+            inputs=d.get("inputs", None),
+            stats=_from_dict(d, "stats", ForEachStats),
+            task=_from_dict(d, "task", Task),
+        )
 
 
 class RunIf(Enum):
     """An optional value indicating the condition that determines whether the task should be run once
     its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.
-    
+
     Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded *
     `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the
     dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been
    completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All

     dependencies have failed"""
 
-    ALL_DONE = 'ALL_DONE'
-    ALL_FAILED = 'ALL_FAILED'
-    ALL_SUCCESS = 'ALL_SUCCESS'
-    AT_LEAST_ONE_FAILED = 'AT_LEAST_ONE_FAILED'
-    AT_LEAST_ONE_SUCCESS = 'AT_LEAST_ONE_SUCCESS'
-    NONE_FAILED = 'NONE_FAILED'
+    ALL_DONE = "ALL_DONE"
+    ALL_FAILED = "ALL_FAILED"
+    ALL_SUCCESS = "ALL_SUCCESS"
+    AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED"
+    AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS"
+    NONE_FAILED = "NONE_FAILED"
 
 
 @dataclass
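Illustrative only: like the other enums in this file, RunIf serializes as its string value and deserializes by value lookup (which is what the generated `_enum()` helper does on from_dict):

    from databricks.sdk.service.jobs import RunIf

    assert RunIf.ALL_DONE.value == "ALL_DONE"                             # what as_dict() writes
    assert RunIf("AT_LEAST_ONE_SUCCESS") is RunIf.AT_LEAST_ONE_SUCCESS    # lookup by value on deserialization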
@@ -4166,19 +4932,21 @@ class RunJobOutput:
     def as_dict(self) -> dict:
         """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobOutput:
         """Deserializes the RunJobOutput from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -4266,46 +5034,68 @@ class RunJobTask:
     def as_dict(self) -> dict:
         """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
-        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = [v for v in self.python_params]
-        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = [v for v in self.dbt_commands]
+        if self.jar_params:
+            body["jar_params"] = [v for v in self.jar_params]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params.as_dict()
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = [v for v in self.python_params]
+        if self.spark_submit_params:
+            body["spark_submit_params"] = [v for v in self.spark_submit_params]
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunJobTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
-        if self.jar_params: body['jar_params'] = self.jar_params
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = self.python_params
-        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = self.dbt_commands
+        if self.jar_params:
+            body["jar_params"] = self.jar_params
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = self.python_params
+        if self.spark_submit_params:
+            body["spark_submit_params"] = self.spark_submit_params
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobTask:
         """Deserializes the RunJobTask from a dictionary."""
-        return cls(dbt_commands=d.get('dbt_commands', None),
-                   jar_params=d.get('jar_params', None),
-                   job_id=d.get('job_id', None),
-                   job_parameters=d.get('job_parameters', None),
-                   notebook_params=d.get('notebook_params', None),
-                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
-                   python_named_params=d.get('python_named_params', None),
-                   python_params=d.get('python_params', None),
-                   spark_submit_params=d.get('spark_submit_params', None),
-                   sql_params=d.get('sql_params', None))
+        return cls(
+            dbt_commands=d.get("dbt_commands", None),
+            jar_params=d.get("jar_params", None),
+            job_id=d.get("job_id", None),
+            job_parameters=d.get("job_parameters", None),
+            notebook_params=d.get("notebook_params", None),
+            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
+            python_named_params=d.get("python_named_params", None),
+            python_params=d.get("python_params", None),
+            spark_submit_params=d.get("spark_submit_params", None),
+            sql_params=d.get("sql_params", None),
+        )
 
 
 class RunLifeCycleState(Enum):
@@ -4319,28 +5109,29 @@ class RunLifeCycleState(Enum):
     exceptional state that indicates a failure in the Jobs service, such as network failure over a
     long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service
     terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is
-    blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry."""
+    blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry.
+    """
 
-    BLOCKED = 'BLOCKED'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    PENDING = 'PENDING'
-    QUEUED = 'QUEUED'
-    RUNNING = 'RUNNING'
-    SKIPPED = 'SKIPPED'
-    TERMINATED = 'TERMINATED'
-    TERMINATING = 'TERMINATING'
-    WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
+    BLOCKED = "BLOCKED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    PENDING = "PENDING"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    SKIPPED = "SKIPPED"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
+    WAITING_FOR_RETRY = "WAITING_FOR_RETRY"
 
 
 class RunLifecycleStateV2State(Enum):
     """The current state of the run."""
 
-    BLOCKED = 'BLOCKED'
-    PENDING = 'PENDING'
-    QUEUED = 'QUEUED'
-    RUNNING = 'RUNNING'
-    TERMINATED = 'TERMINATED'
-    TERMINATING = 'TERMINATING'
+    BLOCKED = "BLOCKED"
+    PENDING = "PENDING"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    TERMINATED = "TERMINATED"
+    TERMINATING = "TERMINATING"
 
 
 @dataclass
@@ -4454,58 +5245,88 @@ class RunNow:
     def as_dict(self) -> dict:
         """Serializes the RunNow into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
-        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.only: body['only'] = [v for v in self.only]
-        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = [v for v in self.python_params]
-        if self.queue: body['queue'] = self.queue.as_dict()
-        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = [v for v in self.dbt_commands]
+        if self.idempotency_token is not None:
+            body["idempotency_token"] = self.idempotency_token
+        if self.jar_params:
+            body["jar_params"] = [v for v in self.jar_params]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.only:
+            body["only"] = [v for v in self.only]
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target.value
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params.as_dict()
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = [v for v in self.python_params]
+        if self.queue:
+            body["queue"] = self.queue.as_dict()
+        if self.spark_submit_params:
+            body["spark_submit_params"] = [v for v in self.spark_submit_params]
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunNow into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
-        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.jar_params: body['jar_params'] = self.jar_params
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_parameters: body['job_parameters'] = self.job_parameters
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.only: body['only'] = self.only
-        if self.performance_target is not None: body['performance_target'] = self.performance_target
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = self.python_params
-        if self.queue: body['queue'] = self.queue
-        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = self.dbt_commands
+        if self.idempotency_token is not None:
+            body["idempotency_token"] = self.idempotency_token
+        if self.jar_params:
+            body["jar_params"] = self.jar_params
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_parameters:
+            body["job_parameters"] = self.job_parameters
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.only:
+            body["only"] = self.only
+        if self.performance_target is not None:
+            body["performance_target"] = self.performance_target
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = self.python_params
+        if self.queue:
+            body["queue"] = self.queue
+        if self.spark_submit_params:
+            body["spark_submit_params"] = self.spark_submit_params
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNow:
         """Deserializes the RunNow from a dictionary."""
-        return cls(dbt_commands=d.get('dbt_commands', None),
-                   idempotency_token=d.get('idempotency_token', None),
-                   jar_params=d.get('jar_params', None),
-                   job_id=d.get('job_id', None),
-                   job_parameters=d.get('job_parameters', None),
-                   notebook_params=d.get('notebook_params', None),
-                   only=d.get('only', None),
-                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
-                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
-                   python_named_params=d.get('python_named_params', None),
-                   python_params=d.get('python_params', None),
-                   queue=_from_dict(d, 'queue', QueueSettings),
-                   spark_submit_params=d.get('spark_submit_params', None),
-                   sql_params=d.get('sql_params', None))
+        return cls(
+            dbt_commands=d.get("dbt_commands", None),
+            idempotency_token=d.get("idempotency_token", None),
+            jar_params=d.get("jar_params", None),
+            job_id=d.get("job_id", None),
+            job_parameters=d.get("job_parameters", None),
+            notebook_params=d.get("notebook_params", None),
+            only=d.get("only", None),
+            performance_target=_enum(d, "performance_target", PerformanceTarget),
+            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
+            python_named_params=d.get("python_named_params", None),
+            python_params=d.get("python_params", None),
+            queue=_from_dict(d, "queue", QueueSettings),
+            spark_submit_params=d.get("spark_submit_params", None),
+            sql_params=d.get("sql_params", None),
+        )
 
 
 @dataclass
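A hedged sketch of how the RunNow payload above is typically produced through the client (method and parameter names follow the SDK's own examples; the job id is a placeholder):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # run_now returns a waiter; .result() blocks until the Run reaches a terminal state.
    # 1234 is a placeholder job id.
    run = w.jobs.run_now(job_id=1234, notebook_params={"name": "john doe"}).result()
    print(run.state.result_state)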
@@ -4521,21 +5342,28 @@ class RunNowResponse:
     def as_dict(self) -> dict:
         """Serializes the RunNowResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.number_in_job is not None:
+            body["number_in_job"] = self.number_in_job
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNowResponse:
         """Deserializes the RunNowResponse from a dictionary."""
-        return cls(number_in_job=d.get('number_in_job', None), run_id=d.get('run_id', None))
+        return cls(
+            number_in_job=d.get("number_in_job", None),
+            run_id=d.get("run_id", None),
+        )
 
 
 @dataclass
@@ -4590,51 +5418,76 @@ def as_dict(self) -> dict:
         """Serializes the RunOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.clean_rooms_notebook_output:
-            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict()
-        if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
-        if self.error is not None: body['error'] = self.error
-        if self.error_trace is not None: body['error_trace'] = self.error_trace
-        if self.info is not None: body['info'] = self.info
-        if self.logs is not None: body['logs'] = self.logs
-        if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated
-        if self.metadata: body['metadata'] = self.metadata.as_dict()
-        if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
-        if self.run_job_output: body['run_job_output'] = self.run_job_output.as_dict()
-        if self.sql_output: body['sql_output'] = self.sql_output.as_dict()
+            body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output.as_dict()
+        if self.dbt_output:
+            body["dbt_output"] = self.dbt_output.as_dict()
+        if self.error is not None:
+            body["error"] = self.error
+        if self.error_trace is not None:
+            body["error_trace"] = self.error_trace
+        if self.info is not None:
+            body["info"] = self.info
+        if self.logs is not None:
+            body["logs"] = self.logs
+        if self.logs_truncated is not None:
+            body["logs_truncated"] = self.logs_truncated
+        if self.metadata:
+            body["metadata"] = self.metadata.as_dict()
+        if self.notebook_output:
+            body["notebook_output"] = self.notebook_output.as_dict()
+        if self.run_job_output:
+            body["run_job_output"] = self.run_job_output.as_dict()
+        if self.sql_output:
+            body["sql_output"] = self.sql_output.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.clean_rooms_notebook_output:
-            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output
-        if self.dbt_output: body['dbt_output'] = self.dbt_output
-        if self.error is not None: body['error'] = self.error
-        if self.error_trace is not None: body['error_trace'] = self.error_trace
-        if self.info is not None: body['info'] = self.info
-        if self.logs is not None: body['logs'] = self.logs
-        if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated
-        if self.metadata: body['metadata'] = self.metadata
-        if self.notebook_output: body['notebook_output'] = self.notebook_output
-        if self.run_job_output: body['run_job_output'] = self.run_job_output
-        if self.sql_output: body['sql_output'] = self.sql_output
+            body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output
+        if self.dbt_output:
+            body["dbt_output"] = self.dbt_output
+        if self.error is not None:
+            body["error"] = self.error
+        if self.error_trace is not None:
+            body["error_trace"] = self.error_trace
+        if self.info is not None:
+            body["info"] = self.info
+        if self.logs is not None:
+            body["logs"] = self.logs
+        if self.logs_truncated is not None:
+            body["logs_truncated"] = self.logs_truncated
+        if self.metadata:
+            body["metadata"] = self.metadata
+        if self.notebook_output:
+            body["notebook_output"] = self.notebook_output
+        if self.run_job_output:
+            body["run_job_output"] = self.run_job_output
+        if self.sql_output:
+            body["sql_output"] = self.sql_output
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunOutput:
         """Deserializes the RunOutput from a dictionary."""
-        return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output',
-                                                          CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput),
-                   dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
-                   error=d.get('error', None),
-                   error_trace=d.get('error_trace', None),
-                   info=d.get('info', None),
-                   logs=d.get('logs', None),
-                   logs_truncated=d.get('logs_truncated', None),
-                   metadata=_from_dict(d, 'metadata', Run),
-                   notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
-                   run_job_output=_from_dict(d, 'run_job_output', RunJobOutput),
-                   sql_output=_from_dict(d, 'sql_output', SqlOutput))
+        return cls(
+            clean_rooms_notebook_output=_from_dict(
+                d,
+                "clean_rooms_notebook_output",
+                CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput,
+            ),
+            dbt_output=_from_dict(d, "dbt_output", DbtOutput),
+            error=d.get("error", None),
+            error_trace=d.get("error_trace", None),
+            info=d.get("info", None),
+            logs=d.get("logs", None),
+            logs_truncated=d.get("logs_truncated", None),
+            metadata=_from_dict(d, "metadata", Run),
+            notebook_output=_from_dict(d, "notebook_output", NotebookOutput),
+            run_job_output=_from_dict(d, "run_job_output", RunJobOutput),
+            sql_output=_from_dict(d, "sql_output", SqlOutput),
+        )
 
 
 @dataclass
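A hedged sketch of where RunOutput comes from in practice, reusing the `w` client from the run_now sketch above (the run id is a placeholder for a task run id):

    output = w.jobs.get_run_output(run_id=1234)
    if output.error is not None:
        print(output.error_trace or output.error)
    elif output.notebook_output:
        print(output.notebook_output.result)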
@@ -4716,40 +5569,58 @@ class RunParameters:
     def as_dict(self) -> dict:
         """Serializes the RunParameters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
-        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = [v for v in self.python_params]
-        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = [v for v in self.dbt_commands]
+        if self.jar_params:
+            body["jar_params"] = [v for v in self.jar_params]
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params.as_dict()
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = [v for v in self.python_params]
+        if self.spark_submit_params:
+            body["spark_submit_params"] = [v for v in self.spark_submit_params]
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunParameters into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
-        if self.jar_params: body['jar_params'] = self.jar_params
-        if self.notebook_params: body['notebook_params'] = self.notebook_params
-        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
-        if self.python_named_params: body['python_named_params'] = self.python_named_params
-        if self.python_params: body['python_params'] = self.python_params
-        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
-        if self.sql_params: body['sql_params'] = self.sql_params
+        if self.dbt_commands:
+            body["dbt_commands"] = self.dbt_commands
+        if self.jar_params:
+            body["jar_params"] = self.jar_params
+        if self.notebook_params:
+            body["notebook_params"] = self.notebook_params
+        if self.pipeline_params:
+            body["pipeline_params"] = self.pipeline_params
+        if self.python_named_params:
+            body["python_named_params"] = self.python_named_params
+        if self.python_params:
+            body["python_params"] = self.python_params
+        if self.spark_submit_params:
+            body["spark_submit_params"] = self.spark_submit_params
+        if self.sql_params:
+            body["sql_params"] = self.sql_params
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunParameters:
         """Deserializes the RunParameters from a dictionary."""
-        return cls(dbt_commands=d.get('dbt_commands', None),
-                   jar_params=d.get('jar_params', None),
-                   notebook_params=d.get('notebook_params', None),
-                   pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
-                   python_named_params=d.get('python_named_params', None),
-                   python_params=d.get('python_params', None),
-                   spark_submit_params=d.get('spark_submit_params', None),
-                   sql_params=d.get('sql_params', None))
+        return cls(
+            dbt_commands=d.get("dbt_commands", None),
+            jar_params=d.get("jar_params", None),
+            notebook_params=d.get("notebook_params", None),
+            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
+            python_named_params=d.get("python_named_params", None),
+            python_params=d.get("python_params", None),
+            spark_submit_params=d.get("spark_submit_params", None),
+            sql_params=d.get("sql_params", None),
+        )
 
 
 class RunResultState(Enum):
@@ -4763,16 +5634,16 @@ class RunResultState(Enum):
     `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`:
     The run was skipped because it was disabled explicitly by the user."""
 
-    CANCELED = 'CANCELED'
-    DISABLED = 'DISABLED'
-    EXCLUDED = 'EXCLUDED'
-    FAILED = 'FAILED'
-    MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
-    SUCCESS = 'SUCCESS'
-    SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
-    TIMEDOUT = 'TIMEDOUT'
-    UPSTREAM_CANCELED = 'UPSTREAM_CANCELED'
-    UPSTREAM_FAILED = 'UPSTREAM_FAILED'
+    CANCELED = "CANCELED"
+    DISABLED = "DISABLED"
+    EXCLUDED = "EXCLUDED"
+    FAILED = "FAILED"
+    MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED"
+    SUCCESS = "SUCCESS"
+    SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES"
+    TIMEDOUT = "TIMEDOUT"
+    UPSTREAM_CANCELED = "UPSTREAM_CANCELED"
+    UPSTREAM_FAILED = "UPSTREAM_FAILED"
 
 
 @dataclass
@@ -4800,33 +5671,43 @@ class RunState:
     def as_dict(self) -> dict:
         """Serializes the RunState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value
-        if self.queue_reason is not None: body['queue_reason'] = self.queue_reason
-        if self.result_state is not None: body['result_state'] = self.result_state.value
-        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.life_cycle_state is not None:
+            body["life_cycle_state"] = self.life_cycle_state.value
+        if self.queue_reason is not None:
+            body["queue_reason"] = self.queue_reason
+        if self.result_state is not None:
+            body["result_state"] = self.result_state.value
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
         if self.user_cancelled_or_timedout is not None:
-            body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
+            body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
-        if self.queue_reason is not None: body['queue_reason'] = self.queue_reason
-        if self.result_state is not None: body['result_state'] = self.result_state
-        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.life_cycle_state is not None:
+            body["life_cycle_state"] = self.life_cycle_state
+        if self.queue_reason is not None:
+            body["queue_reason"] = self.queue_reason
+        if self.result_state is not None:
+            body["result_state"] = self.result_state
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
         if self.user_cancelled_or_timedout is not None:
-            body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
+            body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunState:
         """Deserializes the RunState from a dictionary."""
-        return cls(life_cycle_state=_enum(d, 'life_cycle_state', RunLifeCycleState),
-                   queue_reason=d.get('queue_reason', None),
-                   result_state=_enum(d, 'result_state', RunResultState),
-                   state_message=d.get('state_message', None),
-                   user_cancelled_or_timedout=d.get('user_cancelled_or_timedout', None))
+        return cls(
+            life_cycle_state=_enum(d, "life_cycle_state", RunLifeCycleState),
+            queue_reason=d.get("queue_reason", None),
+            result_state=_enum(d, "result_state", RunResultState),
+            state_message=d.get("state_message", None),
+            user_cancelled_or_timedout=d.get("user_cancelled_or_timedout", None),
+        )
 
 
 @dataclass
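Illustrative only, using enum members shown in this section: a typical terminal-state check against the legacy RunState:

    from databricks.sdk.service.jobs import RunLifeCycleState, RunResultState, RunState

    state = RunState.from_dict({"life_cycle_state": "TERMINATED", "result_state": "SUCCESS"})
    if state.life_cycle_state is RunLifeCycleState.TERMINATED and state.result_state is RunResultState.SUCCESS:
        print("run finished successfully")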
@@ -4846,25 +5727,33 @@ class RunStatus:
     def as_dict(self) -> dict:
         """Serializes the RunStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.queue_details: body['queue_details'] = self.queue_details.as_dict()
-        if self.state is not None: body['state'] = self.state.value
-        if self.termination_details: body['termination_details'] = self.termination_details.as_dict()
+        if self.queue_details:
+            body["queue_details"] = self.queue_details.as_dict()
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.termination_details:
+            body["termination_details"] = self.termination_details.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.queue_details: body['queue_details'] = self.queue_details
-        if self.state is not None: body['state'] = self.state
-        if self.termination_details: body['termination_details'] = self.termination_details
+        if self.queue_details:
+            body["queue_details"] = self.queue_details
+        if self.state is not None:
+            body["state"] = self.state
+        if self.termination_details:
+            body["termination_details"] = self.termination_details
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunStatus:
         """Deserializes the RunStatus from a dictionary."""
-        return cls(queue_details=_from_dict(d, 'queue_details', QueueDetails),
-                   state=_enum(d, 'state', RunLifecycleStateV2State),
-                   termination_details=_from_dict(d, 'termination_details', TerminationDetails))
+        return cls(
+            queue_details=_from_dict(d, "queue_details", QueueDetails),
+            state=_enum(d, "state", RunLifecycleStateV2State),
+            termination_details=_from_dict(d, "termination_details", TerminationDetails),
+        )
 
 
 @dataclass
@@ -5064,158 +5953,241 @@ class RunTask:
     def as_dict(self) -> dict:
         """Serializes the RunTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
         if self.clean_rooms_notebook_task:
-            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
-        if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
-        if self.description is not None: body['description'] = self.description
-        if self.disabled is not None: body['disabled'] = self.disabled
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict()
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance.as_dict()
+        if self.condition_task:
+            body["condition_task"] = self.condition_task.as_dict()
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task.as_dict()
+        if self.depends_on:
+            body["depends_on"] = [v.as_dict() for v in self.depends_on]
+        if self.description is not None:
+            body["description"] = self.description
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target.value
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.resolved_values: body['resolved_values'] = self.resolved_values.as_dict()
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_if is not None: body['run_if'] = self.run_if.value
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state.as_dict()
-        if self.status: body['status'] = self.status.as_dict()
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+            body["effective_performance_target"] = self.effective_performance_target.value
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task.as_dict()
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster.as_dict()
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task.as_dict()
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task.as_dict()
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task.as_dict()
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.resolved_values:
+            body["resolved_values"] = self.resolved_values.as_dict()
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_if is not None:
+            body["run_if"] = self.run_if.value
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task.as_dict()
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task.as_dict()
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task.as_dict()
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task.as_dict()
+        if self.sql_task:
+            body["sql_task"] = self.sql_task.as_dict()
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.status:
+            body["status"] = self.status.as_dict()
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
-        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
-        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
-        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
-        if self.condition_task: body['condition_task'] = self.condition_task
-        if self.dbt_task: body['dbt_task'] = self.dbt_task
-        if self.depends_on: body['depends_on'] = self.depends_on
-        if self.description is not None: body['description'] = self.description
-        if self.disabled is not None: body['disabled'] = self.disabled
+        if self.attempt_number is not None:
+            body["attempt_number"] = self.attempt_number
+        if self.clean_rooms_notebook_task:
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task
+        if self.cleanup_duration is not None:
+            body["cleanup_duration"] = self.cleanup_duration
+        if self.cluster_instance:
+            body["cluster_instance"] = self.cluster_instance
+        if self.condition_task:
+            body["condition_task"] = self.condition_task
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task
+        if self.depends_on:
+            body["depends_on"] = self.depends_on
+        if self.description is not None:
+            body["description"] = self.description
+        if self.disabled is not None:
+            body["disabled"] = self.disabled
         if self.effective_performance_target is not None:
-            body['effective_performance_target'] = self.effective_performance_target
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task
-        if self.git_source: body['git_source'] = self.git_source
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = self.libraries
-        if self.new_cluster: body['new_cluster'] = self.new_cluster
-        if self.notebook_task: body['notebook_task'] = self.notebook_task
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
-        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
-        if self.resolved_values: body['resolved_values'] = self.resolved_values
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_if is not None: body['run_if'] = self.run_if
-        if self.run_job_task: body['run_job_task'] = self.run_job_task
-        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
-        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
-        if self.sql_task: body['sql_task'] = self.sql_task
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state: body['state'] = self.state
-        if self.status: body['status'] = self.status
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+            body["effective_performance_target"] = self.effective_performance_target
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.execution_duration is not None:
+            body["execution_duration"] = self.execution_duration
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task
+        if self.queue_duration is not None:
+            body["queue_duration"] = self.queue_duration
+        if self.resolved_values:
+            body["resolved_values"] = self.resolved_values
+        if self.run_duration is not None:
+            body["run_duration"] = self.run_duration
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_if is not None:
+            body["run_if"] = self.run_if
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task
+        if self.run_page_url is not None:
+            body["run_page_url"] = self.run_page_url
+        if self.setup_duration is not None:
+            body["setup_duration"] = self.setup_duration
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task
+        if self.sql_task:
+            body["sql_task"] = self.sql_task
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state:
+            body["state"] = self.state
+        if self.status:
+            body["status"] = self.status
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTask:
         """Deserializes the RunTask from a dictionary."""
-        return cls(attempt_number=d.get('attempt_number', None),
-                   clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
-                                                        CleanRoomsNotebookTask),
-                   cleanup_duration=d.get('cleanup_duration', None),
-                   cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
-                   condition_task=_from_dict(d, 'condition_task', RunConditionTask),
-                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
-                   depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
-                   description=d.get('description', None),
-                   disabled=d.get('disabled', None),
-                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
-                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
-                   end_time=d.get('end_time', None),
-                   environment_key=d.get('environment_key', None),
-                   execution_duration=d.get('execution_duration', None),
-                   existing_cluster_id=d.get('existing_cluster_id', None),
-                   for_each_task=_from_dict(d, 'for_each_task', RunForEachTask),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   job_cluster_key=d.get('job_cluster_key', None),
-                   libraries=_repeated_dict(d, 'libraries', compute.Library),
-                   new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
-                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
-                   notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings),
-                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
-                   queue_duration=d.get('queue_duration', None),
-                   resolved_values=_from_dict(d, 'resolved_values', ResolvedValues),
-                   run_duration=d.get('run_duration', None),
-                   run_id=d.get('run_id', None),
-                   run_if=_enum(d, 'run_if', RunIf),
-                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
-                   run_page_url=d.get('run_page_url', None),
-                   setup_duration=d.get('setup_duration', None),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
-                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
-                   sql_task=_from_dict(d, 'sql_task', SqlTask),
-                   start_time=d.get('start_time', None),
-                   state=_from_dict(d, 'state', RunState),
-                   status=_from_dict(d, 'status', RunStatus),
-                   task_key=d.get('task_key', None),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            attempt_number=d.get("attempt_number", None),
+            clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask),
+            cleanup_duration=d.get("cleanup_duration", None),
+            cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance),
+            condition_task=_from_dict(d, "condition_task", RunConditionTask),
+            dbt_task=_from_dict(d, "dbt_task", DbtTask),
+            depends_on=_repeated_dict(d, "depends_on", TaskDependency),
+            description=d.get("description", None),
+            disabled=d.get("disabled", None),
+            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
+            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
+            end_time=d.get("end_time", None),
+            environment_key=d.get("environment_key", None),
+            execution_duration=d.get("execution_duration", None),
+            existing_cluster_id=d.get("existing_cluster_id", None),
+            for_each_task=_from_dict(d, "for_each_task", RunForEachTask),
+            git_source=_from_dict(d, "git_source", GitSource),
+            job_cluster_key=d.get("job_cluster_key", None),
+            libraries=_repeated_dict(d, "libraries", compute.Library),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+            notebook_task=_from_dict(d, "notebook_task", NotebookTask),
+            notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings),
+            pipeline_task=_from_dict(d, "pipeline_task", PipelineTask),
+            python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask),
+            queue_duration=d.get("queue_duration", None),
+            resolved_values=_from_dict(d, "resolved_values", ResolvedValues),
+            run_duration=d.get("run_duration", None),
+            run_id=d.get("run_id", None),
+            run_if=_enum(d, "run_if", RunIf),
+            run_job_task=_from_dict(d, "run_job_task", RunJobTask),
+            run_page_url=d.get("run_page_url", None),
+            setup_duration=d.get("setup_duration", None),
+            spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask),
+            spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask),
+            spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask),
+            sql_task=_from_dict(d, "sql_task", SqlTask),
+            start_time=d.get("start_time", None),
+            state=_from_dict(d, "state", RunState),
+            status=_from_dict(d, "status", RunStatus),
+            task_key=d.get("task_key", None),
+            timeout_seconds=d.get("timeout_seconds", None),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
 
 
 class RunType(Enum):
     """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
     `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
     run. A run created with :method:jobs/submit.
-    
-    [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
 
-    JOB_RUN = 'JOB_RUN'
-    SUBMIT_RUN = 'SUBMIT_RUN'
-    WORKFLOW_RUN = 'WORKFLOW_RUN'
+    [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow
+    """
+
+    JOB_RUN = "JOB_RUN"
+    SUBMIT_RUN = "SUBMIT_RUN"
+    WORKFLOW_RUN = "WORKFLOW_RUN"
 
 
 class Source(Enum):
@@ -5227,8 +6199,8 @@ class Source(Enum):
     * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in
     cloud Git provider."""
 
-    GIT = 'GIT'
-    WORKSPACE = 'WORKSPACE'
+    GIT = "GIT"
+    WORKSPACE = "WORKSPACE"
 
 
 @dataclass
@@ -5257,28 +6229,38 @@ class SparkJarTask:
     def as_dict(self) -> dict:
         """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
-        if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
-        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
+        if self.jar_uri is not None:
+            body["jar_uri"] = self.jar_uri
+        if self.main_class_name is not None:
+            body["main_class_name"] = self.main_class_name
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
+        if self.run_as_repl is not None:
+            body["run_as_repl"] = self.run_as_repl
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
-        if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
+        if self.jar_uri is not None:
+            body["jar_uri"] = self.jar_uri
+        if self.main_class_name is not None:
+            body["main_class_name"] = self.main_class_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.run_as_repl is not None:
+            body["run_as_repl"] = self.run_as_repl
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkJarTask:
         """Deserializes the SparkJarTask from a dictionary."""
-        return cls(jar_uri=d.get('jar_uri', None),
-                   main_class_name=d.get('main_class_name', None),
-                   parameters=d.get('parameters', None),
-                   run_as_repl=d.get('run_as_repl', None))
+        return cls(
+            jar_uri=d.get("jar_uri", None),
+            main_class_name=d.get("main_class_name", None),
+            parameters=d.get("parameters", None),
+            run_as_repl=d.get("run_as_repl", None),
+        )
 
 
 @dataclass
@@ -5308,25 +6290,33 @@ class SparkPythonTask:
     def as_dict(self) -> dict:
         """Serializes the SparkPythonTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
-        if self.python_file is not None: body['python_file'] = self.python_file
-        if self.source is not None: body['source'] = self.source.value
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
+        if self.python_file is not None:
+            body["python_file"] = self.python_file
+        if self.source is not None:
+            body["source"] = self.source.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
-        if self.python_file is not None: body['python_file'] = self.python_file
-        if self.source is not None: body['source'] = self.source
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.python_file is not None:
+            body["python_file"] = self.python_file
+        if self.source is not None:
+            body["source"] = self.source
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkPythonTask:
         """Deserializes the SparkPythonTask from a dictionary."""
-        return cls(parameters=d.get('parameters', None),
-                   python_file=d.get('python_file', None),
-                   source=_enum(d, 'source', Source))
+        return cls(
+            parameters=d.get("parameters", None),
+            python_file=d.get("python_file", None),
+            source=_enum(d, "source", Source),
+        )
 
 
 @dataclass
@@ -5341,19 +6331,21 @@ class SparkSubmitTask:
     def as_dict(self) -> dict:
         """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.parameters:
+            body["parameters"] = [v for v in self.parameters]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkSubmitTask:
         """Deserializes the SparkSubmitTask from a dictionary."""
-        return cls(parameters=d.get('parameters', None))
+        return cls(parameters=d.get("parameters", None))
 
 
 @dataclass
@@ -5380,42 +6372,54 @@ class SqlAlertOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlAlertOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_state is not None: body['alert_state'] = self.alert_state.value
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements]
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.alert_state is not None:
+            body["alert_state"] = self.alert_state.value
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.sql_statements:
+            body["sql_statements"] = [v.as_dict() for v in self.sql_statements]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_state is not None: body['alert_state'] = self.alert_state
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.sql_statements: body['sql_statements'] = self.sql_statements
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.alert_state is not None:
+            body["alert_state"] = self.alert_state
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.sql_statements:
+            body["sql_statements"] = self.sql_statements
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlAlertOutput:
         """Deserializes the SqlAlertOutput from a dictionary."""
-        return cls(alert_state=_enum(d, 'alert_state', SqlAlertState),
-                   output_link=d.get('output_link', None),
-                   query_text=d.get('query_text', None),
-                   sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            alert_state=_enum(d, "alert_state", SqlAlertState),
+            output_link=d.get("output_link", None),
+            query_text=d.get("query_text", None),
+            sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 class SqlAlertState(Enum):
     """The state of the SQL alert.
-    
+
     * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger
     conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions"""
 
-    OK = 'OK'
-    TRIGGERED = 'TRIGGERED'
-    UNKNOWN = 'UNKNOWN'
+    OK = "OK"
+    TRIGGERED = "TRIGGERED"
+    UNKNOWN = "UNKNOWN"
 
 
 @dataclass
@@ -5429,22 +6433,28 @@ class SqlDashboardOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlDashboardOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
-        if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
+        if self.widgets:
+            body["widgets"] = [v.as_dict() for v in self.widgets]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
-        if self.widgets: body['widgets'] = self.widgets
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
+        if self.widgets:
+            body["widgets"] = self.widgets
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardOutput:
         """Deserializes the SqlDashboardOutput from a dictionary."""
-        return cls(warehouse_id=d.get('warehouse_id', None),
-                   widgets=_repeated_dict(d, 'widgets', SqlDashboardWidgetOutput))
+        return cls(
+            warehouse_id=d.get("warehouse_id", None),
+            widgets=_repeated_dict(d, "widgets", SqlDashboardWidgetOutput),
+        )
 
 
 @dataclass
@@ -5473,46 +6483,62 @@ class SqlDashboardWidgetOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlDashboardWidgetOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.error: body['error'] = self.error.as_dict()
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.status is not None: body['status'] = self.status.value
-        if self.widget_id is not None: body['widget_id'] = self.widget_id
-        if self.widget_title is not None: body['widget_title'] = self.widget_title
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.error:
+            body["error"] = self.error.as_dict()
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.widget_id is not None:
+            body["widget_id"] = self.widget_id
+        if self.widget_title is not None:
+            body["widget_title"] = self.widget_title
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.error: body['error'] = self.error
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.status is not None: body['status'] = self.status
-        if self.widget_id is not None: body['widget_id'] = self.widget_id
-        if self.widget_title is not None: body['widget_title'] = self.widget_title
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.error:
+            body["error"] = self.error
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.status is not None:
+            body["status"] = self.status
+        if self.widget_id is not None:
+            body["widget_id"] = self.widget_id
+        if self.widget_title is not None:
+            body["widget_title"] = self.widget_title
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardWidgetOutput:
         """Deserializes the SqlDashboardWidgetOutput from a dictionary."""
-        return cls(end_time=d.get('end_time', None),
-                   error=_from_dict(d, 'error', SqlOutputError),
-                   output_link=d.get('output_link', None),
-                   start_time=d.get('start_time', None),
-                   status=_enum(d, 'status', SqlDashboardWidgetOutputStatus),
-                   widget_id=d.get('widget_id', None),
-                   widget_title=d.get('widget_title', None))
+        return cls(
+            end_time=d.get("end_time", None),
+            error=_from_dict(d, "error", SqlOutputError),
+            output_link=d.get("output_link", None),
+            start_time=d.get("start_time", None),
+            status=_enum(d, "status", SqlDashboardWidgetOutputStatus),
+            widget_id=d.get("widget_id", None),
+            widget_title=d.get("widget_title", None),
+        )
 
 
 class SqlDashboardWidgetOutputStatus(Enum):
 
-    CANCELLED = 'CANCELLED'
-    FAILED = 'FAILED'
-    PENDING = 'PENDING'
-    RUNNING = 'RUNNING'
-    SUCCESS = 'SUCCESS'
+    CANCELLED = "CANCELLED"
+    FAILED = "FAILED"
+    PENDING = "PENDING"
+    RUNNING = "RUNNING"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -5529,25 +6555,33 @@ class SqlOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_output: body['alert_output'] = self.alert_output.as_dict()
-        if self.dashboard_output: body['dashboard_output'] = self.dashboard_output.as_dict()
-        if self.query_output: body['query_output'] = self.query_output.as_dict()
+        if self.alert_output:
+            body["alert_output"] = self.alert_output.as_dict()
+        if self.dashboard_output:
+            body["dashboard_output"] = self.dashboard_output.as_dict()
+        if self.query_output:
+            body["query_output"] = self.query_output.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_output: body['alert_output'] = self.alert_output
-        if self.dashboard_output: body['dashboard_output'] = self.dashboard_output
-        if self.query_output: body['query_output'] = self.query_output
+        if self.alert_output:
+            body["alert_output"] = self.alert_output
+        if self.dashboard_output:
+            body["dashboard_output"] = self.dashboard_output
+        if self.query_output:
+            body["query_output"] = self.query_output
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutput:
         """Deserializes the SqlOutput from a dictionary."""
-        return cls(alert_output=_from_dict(d, 'alert_output', SqlAlertOutput),
-                   dashboard_output=_from_dict(d, 'dashboard_output', SqlDashboardOutput),
-                   query_output=_from_dict(d, 'query_output', SqlQueryOutput))
+        return cls(
+            alert_output=_from_dict(d, "alert_output", SqlAlertOutput),
+            dashboard_output=_from_dict(d, "dashboard_output", SqlDashboardOutput),
+            query_output=_from_dict(d, "query_output", SqlQueryOutput),
+        )
 
 
 @dataclass
@@ -5558,19 +6592,21 @@ class SqlOutputError:
     def as_dict(self) -> dict:
         """Serializes the SqlOutputError into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutputError:
         """Deserializes the SqlOutputError from a dictionary."""
-        return cls(message=d.get('message', None))
+        return cls(message=d.get("message", None))
 
 
 @dataclass
@@ -5592,31 +6628,43 @@ class SqlQueryOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlQueryOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements]
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.endpoint_id is not None:
+            body["endpoint_id"] = self.endpoint_id
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.sql_statements:
+            body["sql_statements"] = [v.as_dict() for v in self.sql_statements]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
-        if self.output_link is not None: body['output_link'] = self.output_link
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.sql_statements: body['sql_statements'] = self.sql_statements
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.endpoint_id is not None:
+            body["endpoint_id"] = self.endpoint_id
+        if self.output_link is not None:
+            body["output_link"] = self.output_link
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.sql_statements:
+            body["sql_statements"] = self.sql_statements
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlQueryOutput:
         """Deserializes the SqlQueryOutput from a dictionary."""
-        return cls(endpoint_id=d.get('endpoint_id', None),
-                   output_link=d.get('output_link', None),
-                   query_text=d.get('query_text', None),
-                   sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            endpoint_id=d.get("endpoint_id", None),
+            output_link=d.get("output_link", None),
+            query_text=d.get("query_text", None),
+            sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -5627,19 +6675,21 @@ class SqlStatementOutput:
     def as_dict(self) -> dict:
         """Serializes the SqlStatementOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.lookup_key is not None:
+            body["lookup_key"] = self.lookup_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.lookup_key is not None:
+            body["lookup_key"] = self.lookup_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlStatementOutput:
         """Deserializes the SqlStatementOutput from a dictionary."""
-        return cls(lookup_key=d.get('lookup_key', None))
+        return cls(lookup_key=d.get("lookup_key", None))
 
 
 @dataclass
@@ -5668,34 +6718,48 @@ class SqlTask:
     def as_dict(self) -> dict:
         """Serializes the SqlTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert: body['alert'] = self.alert.as_dict()
-        if self.dashboard: body['dashboard'] = self.dashboard.as_dict()
-        if self.file: body['file'] = self.file.as_dict()
-        if self.parameters: body['parameters'] = self.parameters
-        if self.query: body['query'] = self.query.as_dict()
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.alert:
+            body["alert"] = self.alert.as_dict()
+        if self.dashboard:
+            body["dashboard"] = self.dashboard.as_dict()
+        if self.file:
+            body["file"] = self.file.as_dict()
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert: body['alert'] = self.alert
-        if self.dashboard: body['dashboard'] = self.dashboard
-        if self.file: body['file'] = self.file
-        if self.parameters: body['parameters'] = self.parameters
-        if self.query: body['query'] = self.query
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.alert:
+            body["alert"] = self.alert
+        if self.dashboard:
+            body["dashboard"] = self.dashboard
+        if self.file:
+            body["file"] = self.file
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.query:
+            body["query"] = self.query
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTask:
         """Deserializes the SqlTask from a dictionary."""
-        return cls(alert=_from_dict(d, 'alert', SqlTaskAlert),
-                   dashboard=_from_dict(d, 'dashboard', SqlTaskDashboard),
-                   file=_from_dict(d, 'file', SqlTaskFile),
-                   parameters=d.get('parameters', None),
-                   query=_from_dict(d, 'query', SqlTaskQuery),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            alert=_from_dict(d, "alert", SqlTaskAlert),
+            dashboard=_from_dict(d, "dashboard", SqlTaskDashboard),
+            file=_from_dict(d, "file", SqlTaskFile),
+            parameters=d.get("parameters", None),
+            query=_from_dict(d, "query", SqlTaskQuery),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -5712,25 +6776,33 @@ class SqlTaskAlert:
     def as_dict(self) -> dict:
         """Serializes the SqlTaskAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
-        if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.pause_subscriptions is not None:
+            body["pause_subscriptions"] = self.pause_subscriptions
+        if self.subscriptions:
+            body["subscriptions"] = [v.as_dict() for v in self.subscriptions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
-        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.pause_subscriptions is not None:
+            body["pause_subscriptions"] = self.pause_subscriptions
+        if self.subscriptions:
+            body["subscriptions"] = self.subscriptions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskAlert:
         """Deserializes the SqlTaskAlert from a dictionary."""
-        return cls(alert_id=d.get('alert_id', None),
-                   pause_subscriptions=d.get('pause_subscriptions', None),
-                   subscriptions=_repeated_dict(d, 'subscriptions', SqlTaskSubscription))
+        return cls(
+            alert_id=d.get("alert_id", None),
+            pause_subscriptions=d.get("pause_subscriptions", None),
+            subscriptions=_repeated_dict(d, "subscriptions", SqlTaskSubscription),
+        )
 
 
 @dataclass
@@ -5750,28 +6822,38 @@ class SqlTaskDashboard:
     def as_dict(self) -> dict:
         """Serializes the SqlTaskDashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
-        if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.pause_subscriptions is not None:
+            body["pause_subscriptions"] = self.pause_subscriptions
+        if self.subscriptions:
+            body["subscriptions"] = [v.as_dict() for v in self.subscriptions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
-        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.pause_subscriptions is not None:
+            body["pause_subscriptions"] = self.pause_subscriptions
+        if self.subscriptions:
+            body["subscriptions"] = self.subscriptions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskDashboard:
         """Deserializes the SqlTaskDashboard from a dictionary."""
-        return cls(custom_subject=d.get('custom_subject', None),
-                   dashboard_id=d.get('dashboard_id', None),
-                   pause_subscriptions=d.get('pause_subscriptions', None),
-                   subscriptions=_repeated_dict(d, 'subscriptions', SqlTaskSubscription))
+        return cls(
+            custom_subject=d.get("custom_subject", None),
+            dashboard_id=d.get("dashboard_id", None),
+            pause_subscriptions=d.get("pause_subscriptions", None),
+            subscriptions=_repeated_dict(d, "subscriptions", SqlTaskSubscription),
+        )
 
 
 @dataclass
@@ -5792,21 +6874,25 @@ class SqlTaskFile:
     def as_dict(self) -> dict:
         """Serializes the SqlTaskFile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.source is not None: body['source'] = self.source.value
+        if self.path is not None:
+            body["path"] = self.path
+        if self.source is not None:
+            body["source"] = self.source.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.source is not None: body['source'] = self.source
+        if self.path is not None:
+            body["path"] = self.path
+        if self.source is not None:
+            body["source"] = self.source
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskFile:
         """Deserializes the SqlTaskFile from a dictionary."""
-        return cls(path=d.get('path', None), source=_enum(d, 'source', Source))
+        return cls(path=d.get("path", None), source=_enum(d, "source", Source))
 
 
 @dataclass
@@ -5817,19 +6903,21 @@ class SqlTaskQuery:
     def as_dict(self) -> dict:
         """Serializes the SqlTaskQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskQuery:
         """Deserializes the SqlTaskQuery from a dictionary."""
-        return cls(query_id=d.get('query_id', None))
+        return cls(query_id=d.get("query_id", None))
 
 
 @dataclass
@@ -5846,21 +6934,28 @@ class SqlTaskSubscription:
     def as_dict(self) -> dict:
         """Serializes the SqlTaskSubscription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_id is not None: body['destination_id'] = self.destination_id
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.destination_id is not None:
+            body["destination_id"] = self.destination_id
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_id is not None: body['destination_id'] = self.destination_id
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.destination_id is not None:
+            body["destination_id"] = self.destination_id
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription:
         """Deserializes the SqlTaskSubscription from a dictionary."""
-        return cls(destination_id=d.get('destination_id', None), user_name=d.get('user_name', None))
+        return cls(
+            destination_id=d.get("destination_id", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -5931,58 +7026,87 @@ def as_dict(self) -> dict:
         """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
-        if self.git_source: body['git_source'] = self.git_source.as_dict()
-        if self.health: body['health'] = self.health.as_dict()
-        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.queue: body['queue'] = self.queue.as_dict()
-        if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.environments:
+            body["environments"] = [v.as_dict() for v in self.environments]
+        if self.git_source:
+            body["git_source"] = self.git_source.as_dict()
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.idempotency_token is not None:
+            body["idempotency_token"] = self.idempotency_token
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.queue:
+            body["queue"] = self.queue.as_dict()
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.tasks:
+            body["tasks"] = [v.as_dict() for v in self.tasks]
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SubmitRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.environments: body['environments'] = self.environments
-        if self.git_source: body['git_source'] = self.git_source
-        if self.health: body['health'] = self.health
-        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.queue: body['queue'] = self.queue
-        if self.run_as: body['run_as'] = self.run_as
-        if self.run_name is not None: body['run_name'] = self.run_name
-        if self.tasks: body['tasks'] = self.tasks
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.environments:
+            body["environments"] = self.environments
+        if self.git_source:
+            body["git_source"] = self.git_source
+        if self.health:
+            body["health"] = self.health
+        if self.idempotency_token is not None:
+            body["idempotency_token"] = self.idempotency_token
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.queue:
+            body["queue"] = self.queue
+        if self.run_as:
+            body["run_as"] = self.run_as
+        if self.run_name is not None:
+            body["run_name"] = self.run_name
+        if self.tasks:
+            body["tasks"] = self.tasks
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
-                   budget_policy_id=d.get('budget_policy_id', None),
-                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
-                   environments=_repeated_dict(d, 'environments', JobEnvironment),
-                   git_source=_from_dict(d, 'git_source', GitSource),
-                   health=_from_dict(d, 'health', JobsHealthRules),
-                   idempotency_token=d.get('idempotency_token', None),
-                   notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
-                   queue=_from_dict(d, 'queue', QueueSettings),
-                   run_as=_from_dict(d, 'run_as', JobRunAs),
-                   run_name=d.get('run_name', None),
-                   tasks=_repeated_dict(d, 'tasks', SubmitTask),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest),
+            budget_policy_id=d.get("budget_policy_id", None),
+            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
+            environments=_repeated_dict(d, "environments", JobEnvironment),
+            git_source=_from_dict(d, "git_source", GitSource),
+            health=_from_dict(d, "health", JobsHealthRules),
+            idempotency_token=d.get("idempotency_token", None),
+            notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings),
+            queue=_from_dict(d, "queue", QueueSettings),
+            run_as=_from_dict(d, "run_as", JobRunAs),
+            run_name=d.get("run_name", None),
+            tasks=_repeated_dict(d, "tasks", SubmitTask),
+            timeout_seconds=d.get("timeout_seconds", None),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
 
 
 @dataclass
@@ -5995,19 +7119,21 @@ class SubmitRunResponse:
     def as_dict(self) -> dict:
         """Serializes the SubmitRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRunResponse:
         """Deserializes the SubmitRunResponse from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -6127,92 +7253,142 @@ def as_dict(self) -> dict:
         """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.clean_rooms_notebook_task:
-            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
-        if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
-        if self.description is not None: body['description'] = self.description
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
-        if self.health: body['health'] = self.health.as_dict()
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
-        if self.run_if is not None: body['run_if'] = self.run_if.value
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict()
+        if self.condition_task:
+            body["condition_task"] = self.condition_task.as_dict()
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task.as_dict()
+        if self.depends_on:
+            body["depends_on"] = [v.as_dict() for v in self.depends_on]
+        if self.description is not None:
+            body["description"] = self.description
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task.as_dict()
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster.as_dict()
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task.as_dict()
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task.as_dict()
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task.as_dict()
+        if self.run_if is not None:
+            body["run_if"] = self.run_if.value
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task.as_dict()
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task.as_dict()
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task.as_dict()
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task.as_dict()
+        if self.sql_task:
+            body["sql_task"] = self.sql_task.as_dict()
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SubmitTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
-        if self.condition_task: body['condition_task'] = self.condition_task
-        if self.dbt_task: body['dbt_task'] = self.dbt_task
-        if self.depends_on: body['depends_on'] = self.depends_on
-        if self.description is not None: body['description'] = self.description
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task
-        if self.health: body['health'] = self.health
-        if self.libraries: body['libraries'] = self.libraries
-        if self.new_cluster: body['new_cluster'] = self.new_cluster
-        if self.notebook_task: body['notebook_task'] = self.notebook_task
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
-        if self.run_if is not None: body['run_if'] = self.run_if
-        if self.run_job_task: body['run_job_task'] = self.run_job_task
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
-        if self.sql_task: body['sql_task'] = self.sql_task
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        if self.clean_rooms_notebook_task:
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task
+        if self.condition_task:
+            body["condition_task"] = self.condition_task
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task
+        if self.depends_on:
+            body["depends_on"] = self.depends_on
+        if self.description is not None:
+            body["description"] = self.description
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task
+        if self.health:
+            body["health"] = self.health
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task
+        if self.run_if is not None:
+            body["run_if"] = self.run_if
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task
+        if self.sql_task:
+            body["sql_task"] = self.sql_task
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
-        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
-                                                        CleanRoomsNotebookTask),
-                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
-                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
-                   depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
-                   description=d.get('description', None),
-                   email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
-                   environment_key=d.get('environment_key', None),
-                   existing_cluster_id=d.get('existing_cluster_id', None),
-                   for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
-                   health=_from_dict(d, 'health', JobsHealthRules),
-                   libraries=_repeated_dict(d, 'libraries', compute.Library),
-                   new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
-                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
-                   notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings),
-                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
-                   run_if=_enum(d, 'run_if', RunIf),
-                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
-                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
-                   sql_task=_from_dict(d, 'sql_task', SqlTask),
-                   task_key=d.get('task_key', None),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask),
+            condition_task=_from_dict(d, "condition_task", ConditionTask),
+            dbt_task=_from_dict(d, "dbt_task", DbtTask),
+            depends_on=_repeated_dict(d, "depends_on", TaskDependency),
+            description=d.get("description", None),
+            email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications),
+            environment_key=d.get("environment_key", None),
+            existing_cluster_id=d.get("existing_cluster_id", None),
+            for_each_task=_from_dict(d, "for_each_task", ForEachTask),
+            health=_from_dict(d, "health", JobsHealthRules),
+            libraries=_repeated_dict(d, "libraries", compute.Library),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+            notebook_task=_from_dict(d, "notebook_task", NotebookTask),
+            notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings),
+            pipeline_task=_from_dict(d, "pipeline_task", PipelineTask),
+            python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask),
+            run_if=_enum(d, "run_if", RunIf),
+            run_job_task=_from_dict(d, "run_job_task", RunJobTask),
+            spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask),
+            spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask),
+            spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask),
+            sql_task=_from_dict(d, "sql_task", SqlTask),
+            task_key=d.get("task_key", None),
+            timeout_seconds=d.get("timeout_seconds", None),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
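
As a quick illustration of how these generated dataclasses are used, the sketch below builds a SubmitTask from a few of the optional fields listed above and serializes it; the cluster id and task keys are placeholders, not values from this patch.

from databricks.sdk.service.jobs import SubmitTask, TaskDependency

# A task that reuses an existing cluster and depends on a "prepare" task.
task = SubmitTask(
    task_key="main",
    existing_cluster_id="1234-567890-abcdefgh",  # placeholder cluster id
    depends_on=[TaskDependency(task_key="prepare")],
    timeout_seconds=3600,
)

# as_dict() emits only the fields that were actually set.
assert task.as_dict() == {
    "task_key": "main",
    "existing_cluster_id": "1234-567890-abcdefgh",
    "depends_on": [{"task_key": "prepare"}],
    "timeout_seconds": 3600,
}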
 
 
 @dataclass
@@ -6236,32 +7412,38 @@ class TableUpdateTriggerConfiguration:
     def as_dict(self) -> dict:
         """Serializes the TableUpdateTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition is not None: body['condition'] = self.condition.value
+        if self.condition is not None:
+            body["condition"] = self.condition.value
         if self.min_time_between_triggers_seconds is not None:
-            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.table_names: body['table_names'] = [v for v in self.table_names]
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.table_names:
+            body["table_names"] = [v for v in self.table_names]
         if self.wait_after_last_change_seconds is not None:
-            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition is not None: body['condition'] = self.condition
+        if self.condition is not None:
+            body["condition"] = self.condition
         if self.min_time_between_triggers_seconds is not None:
-            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.table_names: body['table_names'] = self.table_names
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.table_names:
+            body["table_names"] = self.table_names
         if self.wait_after_last_change_seconds is not None:
-            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableUpdateTriggerConfiguration:
         """Deserializes the TableUpdateTriggerConfiguration from a dictionary."""
-        return cls(condition=_enum(d, 'condition', Condition),
-                   min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None),
-                   table_names=d.get('table_names', None),
-                   wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
+        return cls(
+            condition=_enum(d, "condition", Condition),
+            min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None),
+            table_names=d.get("table_names", None),
+            wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None),
+        )
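
To make the shape concrete, here is a minimal sketch of a table-update trigger configuration; the table names are placeholders.

from databricks.sdk.service.jobs import TableUpdateTriggerConfiguration

cfg = TableUpdateTriggerConfiguration(
    table_names=["main.ops.events", "main.ops.users"],  # placeholder table names
    min_time_between_triggers_seconds=600,
    wait_after_last_change_seconds=120,
)
# Only the populated fields are serialized:
# {'min_time_between_triggers_seconds': 600,
#  'table_names': ['main.ops.events', 'main.ops.users'],
#  'wait_after_last_change_seconds': 120}
print(cfg.as_dict())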
 
 
 @dataclass
@@ -6406,111 +7588,167 @@ def as_dict(self) -> dict:
         """Serializes the Task into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.clean_rooms_notebook_task:
-            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
-        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
-        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
-        if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
-        if self.description is not None: body['description'] = self.description
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict()
+        if self.condition_task:
+            body["condition_task"] = self.condition_task.as_dict()
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task.as_dict()
+        if self.depends_on:
+            body["depends_on"] = [v.as_dict() for v in self.depends_on]
+        if self.description is not None:
+            body["description"] = self.description
         if self.disable_auto_optimization is not None:
-            body['disable_auto_optimization'] = self.disable_auto_optimization
-        if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict()
-        if self.health: body['health'] = self.health.as_dict()
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.max_retries is not None: body['max_retries'] = self.max_retries
+            body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task.as_dict()
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.max_retries is not None:
+            body["max_retries"] = self.max_retries
         if self.min_retry_interval_millis is not None:
-            body['min_retry_interval_millis'] = self.min_retry_interval_millis
-        if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
-        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
-        if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict()
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
-        if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout
-        if self.run_if is not None: body['run_if'] = self.run_if.value
-        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
-        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
+            body["min_retry_interval_millis"] = self.min_retry_interval_millis
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster.as_dict()
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task.as_dict()
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings.as_dict()
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task.as_dict()
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task.as_dict()
+        if self.retry_on_timeout is not None:
+            body["retry_on_timeout"] = self.retry_on_timeout
+        if self.run_if is not None:
+            body["run_if"] = self.run_if.value
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task.as_dict()
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task.as_dict()
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task.as_dict()
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task.as_dict()
+        if self.sql_task:
+            body["sql_task"] = self.sql_task.as_dict()
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Task into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
-        if self.condition_task: body['condition_task'] = self.condition_task
-        if self.dbt_task: body['dbt_task'] = self.dbt_task
-        if self.depends_on: body['depends_on'] = self.depends_on
-        if self.description is not None: body['description'] = self.description
+        if self.clean_rooms_notebook_task:
+            body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task
+        if self.condition_task:
+            body["condition_task"] = self.condition_task
+        if self.dbt_task:
+            body["dbt_task"] = self.dbt_task
+        if self.depends_on:
+            body["depends_on"] = self.depends_on
+        if self.description is not None:
+            body["description"] = self.description
         if self.disable_auto_optimization is not None:
-            body['disable_auto_optimization'] = self.disable_auto_optimization
-        if self.email_notifications: body['email_notifications'] = self.email_notifications
-        if self.environment_key is not None: body['environment_key'] = self.environment_key
-        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
-        if self.for_each_task: body['for_each_task'] = self.for_each_task
-        if self.health: body['health'] = self.health
-        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
-        if self.libraries: body['libraries'] = self.libraries
-        if self.max_retries is not None: body['max_retries'] = self.max_retries
+            body["disable_auto_optimization"] = self.disable_auto_optimization
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.environment_key is not None:
+            body["environment_key"] = self.environment_key
+        if self.existing_cluster_id is not None:
+            body["existing_cluster_id"] = self.existing_cluster_id
+        if self.for_each_task:
+            body["for_each_task"] = self.for_each_task
+        if self.health:
+            body["health"] = self.health
+        if self.job_cluster_key is not None:
+            body["job_cluster_key"] = self.job_cluster_key
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.max_retries is not None:
+            body["max_retries"] = self.max_retries
         if self.min_retry_interval_millis is not None:
-            body['min_retry_interval_millis'] = self.min_retry_interval_millis
-        if self.new_cluster: body['new_cluster'] = self.new_cluster
-        if self.notebook_task: body['notebook_task'] = self.notebook_task
-        if self.notification_settings: body['notification_settings'] = self.notification_settings
-        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
-        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
-        if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout
-        if self.run_if is not None: body['run_if'] = self.run_if
-        if self.run_job_task: body['run_job_task'] = self.run_job_task
-        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
-        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
-        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
-        if self.sql_task: body['sql_task'] = self.sql_task
-        if self.task_key is not None: body['task_key'] = self.task_key
-        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
-        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+            body["min_retry_interval_millis"] = self.min_retry_interval_millis
+        if self.new_cluster:
+            body["new_cluster"] = self.new_cluster
+        if self.notebook_task:
+            body["notebook_task"] = self.notebook_task
+        if self.notification_settings:
+            body["notification_settings"] = self.notification_settings
+        if self.pipeline_task:
+            body["pipeline_task"] = self.pipeline_task
+        if self.python_wheel_task:
+            body["python_wheel_task"] = self.python_wheel_task
+        if self.retry_on_timeout is not None:
+            body["retry_on_timeout"] = self.retry_on_timeout
+        if self.run_if is not None:
+            body["run_if"] = self.run_if
+        if self.run_job_task:
+            body["run_job_task"] = self.run_job_task
+        if self.spark_jar_task:
+            body["spark_jar_task"] = self.spark_jar_task
+        if self.spark_python_task:
+            body["spark_python_task"] = self.spark_python_task
+        if self.spark_submit_task:
+            body["spark_submit_task"] = self.spark_submit_task
+        if self.sql_task:
+            body["sql_task"] = self.sql_task
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
+        if self.timeout_seconds is not None:
+            body["timeout_seconds"] = self.timeout_seconds
+        if self.webhook_notifications:
+            body["webhook_notifications"] = self.webhook_notifications
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Task:
         """Deserializes the Task from a dictionary."""
-        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
-                                                        CleanRoomsNotebookTask),
-                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
-                   dbt_task=_from_dict(d, 'dbt_task', DbtTask),
-                   depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
-                   description=d.get('description', None),
-                   disable_auto_optimization=d.get('disable_auto_optimization', None),
-                   email_notifications=_from_dict(d, 'email_notifications', TaskEmailNotifications),
-                   environment_key=d.get('environment_key', None),
-                   existing_cluster_id=d.get('existing_cluster_id', None),
-                   for_each_task=_from_dict(d, 'for_each_task', ForEachTask),
-                   health=_from_dict(d, 'health', JobsHealthRules),
-                   job_cluster_key=d.get('job_cluster_key', None),
-                   libraries=_repeated_dict(d, 'libraries', compute.Library),
-                   max_retries=d.get('max_retries', None),
-                   min_retry_interval_millis=d.get('min_retry_interval_millis', None),
-                   new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec),
-                   notebook_task=_from_dict(d, 'notebook_task', NotebookTask),
-                   notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings),
-                   pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask),
-                   python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask),
-                   retry_on_timeout=d.get('retry_on_timeout', None),
-                   run_if=_enum(d, 'run_if', RunIf),
-                   run_job_task=_from_dict(d, 'run_job_task', RunJobTask),
-                   spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask),
-                   spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask),
-                   spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask),
-                   sql_task=_from_dict(d, 'sql_task', SqlTask),
-                   task_key=d.get('task_key', None),
-                   timeout_seconds=d.get('timeout_seconds', None),
-                   webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
+        return cls(
+            clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask),
+            condition_task=_from_dict(d, "condition_task", ConditionTask),
+            dbt_task=_from_dict(d, "dbt_task", DbtTask),
+            depends_on=_repeated_dict(d, "depends_on", TaskDependency),
+            description=d.get("description", None),
+            disable_auto_optimization=d.get("disable_auto_optimization", None),
+            email_notifications=_from_dict(d, "email_notifications", TaskEmailNotifications),
+            environment_key=d.get("environment_key", None),
+            existing_cluster_id=d.get("existing_cluster_id", None),
+            for_each_task=_from_dict(d, "for_each_task", ForEachTask),
+            health=_from_dict(d, "health", JobsHealthRules),
+            job_cluster_key=d.get("job_cluster_key", None),
+            libraries=_repeated_dict(d, "libraries", compute.Library),
+            max_retries=d.get("max_retries", None),
+            min_retry_interval_millis=d.get("min_retry_interval_millis", None),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+            notebook_task=_from_dict(d, "notebook_task", NotebookTask),
+            notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings),
+            pipeline_task=_from_dict(d, "pipeline_task", PipelineTask),
+            python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask),
+            retry_on_timeout=d.get("retry_on_timeout", None),
+            run_if=_enum(d, "run_if", RunIf),
+            run_job_task=_from_dict(d, "run_job_task", RunJobTask),
+            spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask),
+            spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask),
+            spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask),
+            sql_task=_from_dict(d, "sql_task", SqlTask),
+            task_key=d.get("task_key", None),
+            timeout_seconds=d.get("timeout_seconds", None),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )
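
The from_dict/as_dict pair is designed to round-trip: fields absent from the payload stay None and are omitted again on serialization. A minimal sketch (the cluster id is a placeholder):

from databricks.sdk.service.jobs import Task

payload = {
    "task_key": "ingest",
    "existing_cluster_id": "1234-567890-abcdefgh",  # placeholder cluster id
    "max_retries": 3,
    "retry_on_timeout": True,
}
task = Task.from_dict(payload)
assert task.max_retries == 3 and task.notebook_task is None
assert task.as_dict() == payload  # unset fields are simply left out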
 
 
 @dataclass
@@ -6525,21 +7763,25 @@ class TaskDependency:
     def as_dict(self) -> dict:
         """Serializes the TaskDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TaskDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskDependency:
         """Deserializes the TaskDependency from a dictionary."""
-        return cls(outcome=d.get('outcome', None), task_key=d.get('task_key', None))
+        return cls(outcome=d.get("outcome", None), task_key=d.get("task_key", None))
 
 
 @dataclass
@@ -6581,42 +7823,47 @@ def as_dict(self) -> dict:
         """Serializes the TaskEmailNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = [
-                v for v in self.on_duration_warning_threshold_exceeded
-            ]
-        if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
-        if self.on_start: body['on_start'] = [v for v in self.on_start]
+            body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded]
+        if self.on_failure:
+            body["on_failure"] = [v for v in self.on_failure]
+        if self.on_start:
+            body["on_start"] = [v for v in self.on_start]
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
-        if self.on_success: body['on_success'] = [v for v in self.on_success]
+            body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded]
+        if self.on_success:
+            body["on_success"] = [v for v in self.on_success]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
-        if self.on_failure: body['on_failure'] = self.on_failure
-        if self.on_start: body['on_start'] = self.on_start
+            body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure:
+            body["on_failure"] = self.on_failure
+        if self.on_start:
+            body["on_start"] = self.on_start
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
-        if self.on_success: body['on_success'] = self.on_success
+            body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded
+        if self.on_success:
+            body["on_success"] = self.on_success
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskEmailNotifications:
         """Deserializes the TaskEmailNotifications from a dictionary."""
-        return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None),
-                   on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded',
-                                                                None),
-                   on_failure=d.get('on_failure', None),
-                   on_start=d.get('on_start', None),
-                   on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None),
-                   on_success=d.get('on_success', None))
+        return cls(
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+            on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None),
+            on_failure=d.get("on_failure", None),
+            on_start=d.get("on_start", None),
+            on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None),
+            on_success=d.get("on_success", None),
+        )
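
For example, emailing an on-call address on failure while muting skipped runs is just (the address is a placeholder):

from databricks.sdk.service.jobs import TaskEmailNotifications

notifications = TaskEmailNotifications(
    on_failure=["oncall@example.com"],  # placeholder address
    no_alert_for_skipped_runs=True,
)
# Serializes to {'no_alert_for_skipped_runs': True, 'on_failure': ['oncall@example.com']}
body = notifications.as_dict()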
 
 
 @dataclass
@@ -6637,29 +7884,33 @@ class TaskNotificationSettings:
     def as_dict(self) -> dict:
         """Serializes the TaskNotificationSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
+        if self.alert_on_last_attempt is not None:
+            body["alert_on_last_attempt"] = self.alert_on_last_attempt
         if self.no_alert_for_canceled_runs is not None:
-            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
+        if self.alert_on_last_attempt is not None:
+            body["alert_on_last_attempt"] = self.alert_on_last_attempt
         if self.no_alert_for_canceled_runs is not None:
-            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
         if self.no_alert_for_skipped_runs is not None:
-            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
         """Deserializes the TaskNotificationSettings from a dictionary."""
-        return cls(alert_on_last_attempt=d.get('alert_on_last_attempt', None),
-                   no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None),
-                   no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
+        return cls(
+            alert_on_last_attempt=d.get("alert_on_last_attempt", None),
+            no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None),
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+        )
 
 
 class TerminationCodeCode(Enum):
@@ -6696,32 +7947,33 @@ class TerminationCodeCode(Enum):
     run failed due to a cloud provider issue. Refer to the state message for further details. *
     `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
     limit.
-    
-    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
 
-    BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED'
-    CANCELED = 'CANCELED'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    CLUSTER_ERROR = 'CLUSTER_ERROR'
-    CLUSTER_REQUEST_LIMIT_EXCEEDED = 'CLUSTER_REQUEST_LIMIT_EXCEEDED'
-    DRIVER_ERROR = 'DRIVER_ERROR'
-    FEATURE_DISABLED = 'FEATURE_DISABLED'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    INVALID_CLUSTER_REQUEST = 'INVALID_CLUSTER_REQUEST'
-    INVALID_RUN_CONFIGURATION = 'INVALID_RUN_CONFIGURATION'
-    LIBRARY_INSTALLATION_ERROR = 'LIBRARY_INSTALLATION_ERROR'
-    MAX_CONCURRENT_RUNS_EXCEEDED = 'MAX_CONCURRENT_RUNS_EXCEEDED'
-    MAX_JOB_QUEUE_SIZE_EXCEEDED = 'MAX_JOB_QUEUE_SIZE_EXCEEDED'
-    MAX_SPARK_CONTEXTS_EXCEEDED = 'MAX_SPARK_CONTEXTS_EXCEEDED'
-    REPOSITORY_CHECKOUT_FAILED = 'REPOSITORY_CHECKOUT_FAILED'
-    RESOURCE_NOT_FOUND = 'RESOURCE_NOT_FOUND'
-    RUN_EXECUTION_ERROR = 'RUN_EXECUTION_ERROR'
-    SKIPPED = 'SKIPPED'
-    STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR'
-    SUCCESS = 'SUCCESS'
-    UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR'
-    USER_CANCELED = 'USER_CANCELED'
-    WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED'
+    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
+    """
+
+    BUDGET_POLICY_LIMIT_EXCEEDED = "BUDGET_POLICY_LIMIT_EXCEEDED"
+    CANCELED = "CANCELED"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    CLUSTER_ERROR = "CLUSTER_ERROR"
+    CLUSTER_REQUEST_LIMIT_EXCEEDED = "CLUSTER_REQUEST_LIMIT_EXCEEDED"
+    DRIVER_ERROR = "DRIVER_ERROR"
+    FEATURE_DISABLED = "FEATURE_DISABLED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    INVALID_CLUSTER_REQUEST = "INVALID_CLUSTER_REQUEST"
+    INVALID_RUN_CONFIGURATION = "INVALID_RUN_CONFIGURATION"
+    LIBRARY_INSTALLATION_ERROR = "LIBRARY_INSTALLATION_ERROR"
+    MAX_CONCURRENT_RUNS_EXCEEDED = "MAX_CONCURRENT_RUNS_EXCEEDED"
+    MAX_JOB_QUEUE_SIZE_EXCEEDED = "MAX_JOB_QUEUE_SIZE_EXCEEDED"
+    MAX_SPARK_CONTEXTS_EXCEEDED = "MAX_SPARK_CONTEXTS_EXCEEDED"
+    REPOSITORY_CHECKOUT_FAILED = "REPOSITORY_CHECKOUT_FAILED"
+    RESOURCE_NOT_FOUND = "RESOURCE_NOT_FOUND"
+    RUN_EXECUTION_ERROR = "RUN_EXECUTION_ERROR"
+    SKIPPED = "SKIPPED"
+    STORAGE_ACCESS_ERROR = "STORAGE_ACCESS_ERROR"
+    SUCCESS = "SUCCESS"
+    UNAUTHORIZED_ERROR = "UNAUTHORIZED_ERROR"
+    USER_CANCELED = "USER_CANCELED"
+    WORKSPACE_RUN_LIMIT_EXCEEDED = "WORKSPACE_RUN_LIMIT_EXCEEDED"
 
 
 @dataclass
@@ -6779,25 +8031,33 @@ class TerminationDetails:
     def as_dict(self) -> dict:
         """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.code is not None: body['code'] = self.code.value
-        if self.message is not None: body['message'] = self.message
-        if self.type is not None: body['type'] = self.type.value
+        if self.code is not None:
+            body["code"] = self.code.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.code is not None: body['code'] = self.code
-        if self.message is not None: body['message'] = self.message
-        if self.type is not None: body['type'] = self.type
+        if self.code is not None:
+            body["code"] = self.code
+        if self.message is not None:
+            body["message"] = self.message
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationDetails:
         """Deserializes the TerminationDetails from a dictionary."""
-        return cls(code=_enum(d, 'code', TerminationCodeCode),
-                   message=d.get('message', None),
-                   type=_enum(d, 'type', TerminationTypeType))
+        return cls(
+            code=_enum(d, "code", TerminationCodeCode),
+            message=d.get("message", None),
+            type=_enum(d, "type", TerminationTypeType),
+        )
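
These enums are what callers typically inspect once a run finishes; a sketch of that pattern, using a hand-written payload:

from databricks.sdk.service.jobs import TerminationCodeCode, TerminationDetails

details = TerminationDetails.from_dict(
    {"code": "RUN_EXECUTION_ERROR", "message": "Task main failed", "type": "CLIENT_ERROR"}
)
if details.code is not TerminationCodeCode.SUCCESS:
    # e.g. "run ended with RUN_EXECUTION_ERROR: Task main failed"
    print(f"run ended with {details.code.value}: {details.message}")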
 
 
 class TerminationTypeType(Enum):
@@ -6806,13 +8066,13 @@ class TerminationTypeType(Enum):
     * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
     configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
     provider.
-    
+
     [status page]: https://status.databricks.com/"""
 
-    CLIENT_ERROR = 'CLIENT_ERROR'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    SUCCESS = 'SUCCESS'
+    CLIENT_ERROR = "CLIENT_ERROR"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -6825,19 +8085,21 @@ class TriggerInfo:
     def as_dict(self) -> dict:
         """Serializes the TriggerInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerInfo:
         """Deserializes the TriggerInfo from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -6859,36 +8121,48 @@ class TriggerSettings:
     def as_dict(self) -> dict:
         """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.periodic: body['periodic'] = self.periodic.as_dict()
-        if self.table: body['table'] = self.table.as_dict()
-        if self.table_update: body['table_update'] = self.table_update.as_dict()
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival.as_dict()
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
+        if self.periodic:
+            body["periodic"] = self.periodic.as_dict()
+        if self.table:
+            body["table"] = self.table.as_dict()
+        if self.table_update:
+            body["table_update"] = self.table_update.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
-        if self.periodic: body['periodic'] = self.periodic
-        if self.table: body['table'] = self.table
-        if self.table_update: body['table_update'] = self.table_update
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
+        if self.periodic:
+            body["periodic"] = self.periodic
+        if self.table:
+            body["table"] = self.table
+        if self.table_update:
+            body["table_update"] = self.table_update
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
-        return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration),
-                   pause_status=_enum(d, 'pause_status', PauseStatus),
-                   periodic=_from_dict(d, 'periodic', PeriodicTriggerConfiguration),
-                   table=_from_dict(d, 'table', TableUpdateTriggerConfiguration),
-                   table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration))
+        return cls(
+            file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration),
+            pause_status=_enum(d, "pause_status", PauseStatus),
+            periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration),
+            table=_from_dict(d, "table", TableUpdateTriggerConfiguration),
+            table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration),
+        )
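
Combining the pieces above, an unpaused table-update trigger can be declared as below; PauseStatus.UNPAUSED is assumed from the enum defined earlier in this module, and the table name is a placeholder.

from databricks.sdk.service.jobs import (
    PauseStatus,
    TableUpdateTriggerConfiguration,
    TriggerSettings,
)

trigger = TriggerSettings(
    pause_status=PauseStatus.UNPAUSED,
    table_update=TableUpdateTriggerConfiguration(table_names=["main.ops.events"]),  # placeholder table
)
# {'pause_status': 'UNPAUSED', 'table_update': {'table_names': ['main.ops.events']}}
body = trigger.as_dict()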
 
 
 class TriggerType(Enum):
     """The type of trigger that fired this run.
-    
+
     * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`:
     One time triggers that fire a single run. This occurs when you trigger a single run on demand
     through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
@@ -6898,12 +8172,12 @@ class TriggerType(Enum):
     triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by a user to manually
     restart a continuous job run."""
 
-    FILE_ARRIVAL = 'FILE_ARRIVAL'
-    ONE_TIME = 'ONE_TIME'
-    PERIODIC = 'PERIODIC'
-    RETRY = 'RETRY'
-    RUN_JOB_TASK = 'RUN_JOB_TASK'
-    TABLE = 'TABLE'
+    FILE_ARRIVAL = "FILE_ARRIVAL"
+    ONE_TIME = "ONE_TIME"
+    PERIODIC = "PERIODIC"
+    RETRY = "RETRY"
+    RUN_JOB_TASK = "RUN_JOB_TASK"
+    TABLE = "TABLE"
 
 
 @dataclass
@@ -6930,25 +8204,33 @@ class UpdateJob:
     def as_dict(self) -> dict:
         """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fields_to_remove: body['fields_to_remove'] = [v for v in self.fields_to_remove]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
+        if self.fields_to_remove:
+            body["fields_to_remove"] = [v for v in self.fields_to_remove]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings
+        if self.fields_to_remove:
+            body["fields_to_remove"] = self.fields_to_remove
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateJob:
         """Deserializes the UpdateJob from a dictionary."""
-        return cls(fields_to_remove=d.get('fields_to_remove', None),
-                   job_id=d.get('job_id', None),
-                   new_settings=_from_dict(d, 'new_settings', JobSettings))
+        return cls(
+            fields_to_remove=d.get("fields_to_remove", None),
+            job_id=d.get("job_id", None),
+            new_settings=_from_dict(d, "new_settings", JobSettings),
+        )
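
UpdateJob is the partial-update request body: new_settings carries only the settings to change and fields_to_remove names top-level settings to clear. A sketch, assuming the JobSettings dataclass defined earlier in this module and using placeholder values:

from databricks.sdk.service.jobs import JobSettings, UpdateJob

update = UpdateJob(
    job_id=123,  # placeholder job id
    new_settings=JobSettings(max_concurrent_runs=2),  # assumes JobSettings.max_concurrent_runs
    fields_to_remove=["schedule"],  # drop the existing cron schedule
)
body = update.as_dict()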
 
 
 @dataclass
@@ -6985,39 +8267,49 @@ class ViewItem:
     def as_dict(self) -> dict:
         """Serializes the ViewItem into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type.value
+        if self.content is not None:
+            body["content"] = self.content
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ViewItem into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type
+        if self.content is not None:
+            body["content"] = self.content
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ViewItem:
         """Deserializes the ViewItem from a dictionary."""
-        return cls(content=d.get('content', None), name=d.get('name', None), type=_enum(d, 'type', ViewType))
+        return cls(
+            content=d.get("content", None),
+            name=d.get("name", None),
+            type=_enum(d, "type", ViewType),
+        )
 
 
 class ViewType(Enum):
     """* `NOTEBOOK`: Notebook view item. * `DASHBOARD`: Dashboard view item."""
 
-    DASHBOARD = 'DASHBOARD'
-    NOTEBOOK = 'NOTEBOOK'
+    DASHBOARD = "DASHBOARD"
+    NOTEBOOK = "NOTEBOOK"
 
 
 class ViewsToExport(Enum):
     """* `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. *
     `ALL`: All views of the notebook."""
 
-    ALL = 'ALL'
-    CODE = 'CODE'
-    DASHBOARDS = 'DASHBOARDS'
+    ALL = "ALL"
+    CODE = "CODE"
+    DASHBOARDS = "DASHBOARDS"
 
 
 @dataclass
@@ -7027,19 +8319,21 @@ class Webhook:
     def as_dict(self) -> dict:
         """Serializes the Webhook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Webhook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Webhook:
         """Deserializes the Webhook from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 @dataclass
@@ -7073,72 +8367,85 @@ def as_dict(self) -> dict:
         """Serializes the WebhookNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = [
+            body["on_duration_warning_threshold_exceeded"] = [
                 v.as_dict() for v in self.on_duration_warning_threshold_exceeded
             ]
-        if self.on_failure: body['on_failure'] = [v.as_dict() for v in self.on_failure]
-        if self.on_start: body['on_start'] = [v.as_dict() for v in self.on_start]
+        if self.on_failure:
+            body["on_failure"] = [v.as_dict() for v in self.on_failure]
+        if self.on_start:
+            body["on_start"] = [v.as_dict() for v in self.on_start]
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = [v.as_dict() for v in self.on_streaming_backlog_exceeded]
-        if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success]
+            body["on_streaming_backlog_exceeded"] = [v.as_dict() for v in self.on_streaming_backlog_exceeded]
+        if self.on_success:
+            body["on_success"] = [v.as_dict() for v in self.on_success]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.on_duration_warning_threshold_exceeded:
-            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
-        if self.on_failure: body['on_failure'] = self.on_failure
-        if self.on_start: body['on_start'] = self.on_start
+            body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure:
+            body["on_failure"] = self.on_failure
+        if self.on_start:
+            body["on_start"] = self.on_start
         if self.on_streaming_backlog_exceeded:
-            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
-        if self.on_success: body['on_success'] = self.on_success
+            body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded
+        if self.on_success:
+            body["on_success"] = self.on_success
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WebhookNotifications:
         """Deserializes the WebhookNotifications from a dictionary."""
-        return cls(on_duration_warning_threshold_exceeded=_repeated_dict(
-            d, 'on_duration_warning_threshold_exceeded', Webhook),
-                   on_failure=_repeated_dict(d, 'on_failure', Webhook),
-                   on_start=_repeated_dict(d, 'on_start', Webhook),
-                   on_streaming_backlog_exceeded=_repeated_dict(d, 'on_streaming_backlog_exceeded', Webhook),
-                   on_success=_repeated_dict(d, 'on_success', Webhook))
+        return cls(
+            on_duration_warning_threshold_exceeded=_repeated_dict(d, "on_duration_warning_threshold_exceeded", Webhook),
+            on_failure=_repeated_dict(d, "on_failure", Webhook),
+            on_start=_repeated_dict(d, "on_start", Webhook),
+            on_streaming_backlog_exceeded=_repeated_dict(d, "on_streaming_backlog_exceeded", Webhook),
+            on_success=_repeated_dict(d, "on_success", Webhook),
+        )
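
Webhook destinations are referenced by id only, so wiring notifications up is a matter of pointing at an existing notification destination; the id below is a placeholder.

from databricks.sdk.service.jobs import Webhook, WebhookNotifications

hooks = WebhookNotifications(
    on_failure=[Webhook(id="0d2e9a63-placeholder")],  # placeholder destination id
    on_start=[Webhook(id="0d2e9a63-placeholder")],
)
# {'on_failure': [{'id': '0d2e9a63-placeholder'}], 'on_start': [{'id': '0d2e9a63-placeholder'}]}
body = hooks.as_dict()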
 
 
 class JobsAPI:
     """The Jobs API allows you to create, edit, and delete jobs.
-    
+
     You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with
     scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with
     complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error
     reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use
     scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or
     Python, Scala, Spark submit, and Java applications.
-    
+
     You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in
     the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs.
-    
+
     [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
     [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
-    [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets"""
+    [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_run_job_terminated_or_skipped(self,
-                                               run_id: int,
-                                               timeout=timedelta(minutes=20),
-                                               callback: Optional[Callable[[Run], None]] = None) -> Run:
+    def wait_get_run_job_terminated_or_skipped(
+        self,
+        run_id: int,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[Run], None]] = None,
+    ) -> Run:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (RunLifeCycleState.TERMINATED, RunLifeCycleState.SKIPPED, )
-        failure_states = (RunLifeCycleState.INTERNAL_ERROR, )
-        status_message = 'polling...'
+        target_states = (
+            RunLifeCycleState.TERMINATED,
+            RunLifeCycleState.SKIPPED,
+        )
+        failure_states = (RunLifeCycleState.INTERNAL_ERROR,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get_run(run_id=run_id)
             status = poll.state.life_cycle_state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.state:
                 status_message = poll.state.state_message
             if status in target_states:
@@ -7146,95 +8453,113 @@ def wait_get_run_job_terminated_or_skipped(self,
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach TERMINATED or SKIPPED, got {status}: {status_message}'
+                msg = f"failed to reach TERMINATED or SKIPPED, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"run_id={run_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
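
A minimal sketch of how this generated waiter is typically consumed, assuming a workspace client configured through the usual environment variables; the run ID and the `log_progress` callback are placeholders:

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
run_id = 1234  # placeholder: ID of a run started elsewhere

def log_progress(run: jobs.Run) -> None:
    # Invoked with each intermediate poll result before the next sleep.
    print(run.state.life_cycle_state, run.state.state_message)

run = w.jobs.wait_get_run_job_terminated_or_skipped(
    run_id=run_id,
    timeout=timedelta(minutes=30),
    callback=log_progress,
)
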
 
-    def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None):
+    def cancel_all_runs(
+        self,
+        *,
+        all_queued_runs: Optional[bool] = None,
+        job_id: Optional[int] = None,
+    ):
         """Cancel all runs of a job.
-        
+
         Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
         from being started.
-        
+
         :param all_queued_runs: bool (optional)
           Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in
           the workspace are canceled.
         :param job_id: int (optional)
           The canonical identifier of the job to cancel all runs of.
-        
-        
+
+
         """
         body = {}
-        if all_queued_runs is not None: body['all_queued_runs'] = all_queued_runs
-        if job_id is not None: body['job_id'] = job_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if all_queued_runs is not None:
+            body["all_queued_runs"] = all_queued_runs
+        if job_id is not None:
+            body["job_id"] = job_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/jobs/runs/cancel-all', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/jobs/runs/cancel-all", body=body, headers=headers)
 
     def cancel_run(self, run_id: int) -> Wait[Run]:
         """Cancel a run.
-        
+
         Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
         this request completes.
-        
+
         :param run_id: int
           This field is required.
-        
+
         :returns:
           Long-running operation waiter for :class:`Run`.
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         """
         body = {}
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        op_response = self._api.do('POST', '/api/2.1/jobs/runs/cancel', body=body, headers=headers)
-        return Wait(self.wait_get_run_job_terminated_or_skipped,
-                    response=CancelRunResponse.from_dict(op_response),
-                    run_id=run_id)
+        op_response = self._api.do("POST", "/api/2.1/jobs/runs/cancel", body=body, headers=headers)
+        return Wait(
+            self.wait_get_run_job_terminated_or_skipped,
+            response=CancelRunResponse.from_dict(op_response),
+            run_id=run_id,
+        )
 
     def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run:
         return self.cancel_run(run_id=run_id).result(timeout=timeout)
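
A brief usage sketch for the two cancel endpoints above; the IDs are placeholders and the client setup follows the repository's example scripts:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Cancel one run and block until it reaches TERMINATED or SKIPPED.
w.jobs.cancel_run_and_wait(run_id=1234, timeout=timedelta(minutes=5))

# Cancel every active and queued run of a job; returns immediately.
w.jobs.cancel_all_runs(job_id=5678, all_queued_runs=True)
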
 
-    def create(self,
-               *,
-               access_control_list: Optional[List[JobAccessControlRequest]] = None,
-               budget_policy_id: Optional[str] = None,
-               continuous: Optional[Continuous] = None,
-               deployment: Optional[JobDeployment] = None,
-               description: Optional[str] = None,
-               edit_mode: Optional[JobEditMode] = None,
-               email_notifications: Optional[JobEmailNotifications] = None,
-               environments: Optional[List[JobEnvironment]] = None,
-               format: Optional[Format] = None,
-               git_source: Optional[GitSource] = None,
-               health: Optional[JobsHealthRules] = None,
-               job_clusters: Optional[List[JobCluster]] = None,
-               max_concurrent_runs: Optional[int] = None,
-               name: Optional[str] = None,
-               notification_settings: Optional[JobNotificationSettings] = None,
-               parameters: Optional[List[JobParameterDefinition]] = None,
-               performance_target: Optional[PerformanceTarget] = None,
-               queue: Optional[QueueSettings] = None,
-               run_as: Optional[JobRunAs] = None,
-               schedule: Optional[CronSchedule] = None,
-               tags: Optional[Dict[str, str]] = None,
-               tasks: Optional[List[Task]] = None,
-               timeout_seconds: Optional[int] = None,
-               trigger: Optional[TriggerSettings] = None,
-               webhook_notifications: Optional[WebhookNotifications] = None) -> CreateResponse:
+    def create(
+        self,
+        *,
+        access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
+        continuous: Optional[Continuous] = None,
+        deployment: Optional[JobDeployment] = None,
+        description: Optional[str] = None,
+        edit_mode: Optional[JobEditMode] = None,
+        email_notifications: Optional[JobEmailNotifications] = None,
+        environments: Optional[List[JobEnvironment]] = None,
+        format: Optional[Format] = None,
+        git_source: Optional[GitSource] = None,
+        health: Optional[JobsHealthRules] = None,
+        job_clusters: Optional[List[JobCluster]] = None,
+        max_concurrent_runs: Optional[int] = None,
+        name: Optional[str] = None,
+        notification_settings: Optional[JobNotificationSettings] = None,
+        parameters: Optional[List[JobParameterDefinition]] = None,
+        performance_target: Optional[PerformanceTarget] = None,
+        queue: Optional[QueueSettings] = None,
+        run_as: Optional[JobRunAs] = None,
+        schedule: Optional[CronSchedule] = None,
+        tags: Optional[Dict[str, str]] = None,
+        tasks: Optional[List[Task]] = None,
+        timeout_seconds: Optional[int] = None,
+        trigger: Optional[TriggerSettings] = None,
+        webhook_notifications: Optional[WebhookNotifications] = None,
+    ) -> CreateResponse:
         """Create a new job.
-        
+
         Create a new job.
-        
+
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
         :param budget_policy_id: str (optional)
@@ -7250,7 +8575,7 @@ def create(self,
           An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
         :param edit_mode: :class:`JobEditMode` (optional)
           Edit mode of the job.
-          
+
           * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
           an editable state and can be modified.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
@@ -7267,10 +8592,10 @@ def create(self,
         :param git_source: :class:`GitSource` (optional)
           An optional specification for a remote Git repository containing the source code used by tasks.
           Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-          
+
           If `git_source` is set, these tasks retrieve the file from the remote repository by default.
           However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-          
+
           Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
           used, `git_source` must be defined on the job.
         :param health: :class:`JobsHealthRules` (optional)
@@ -7303,7 +8628,7 @@ def create(self,
         :param run_as: :class:`JobRunAs` (optional)
           Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
           the job runs as the user who created the job.
-          
+
           Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -7324,168 +8649,221 @@ def create(self,
           `runNow`.
         :param webhook_notifications: :class:`WebhookNotifications` (optional)
           A collection of system notification IDs to notify when runs of this job begin or complete.
-        
+
         :returns: :class:`CreateResponse`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
-        if continuous is not None: body['continuous'] = continuous.as_dict()
-        if deployment is not None: body['deployment'] = deployment.as_dict()
-        if description is not None: body['description'] = description
-        if edit_mode is not None: body['edit_mode'] = edit_mode.value
-        if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
-        if environments is not None: body['environments'] = [v.as_dict() for v in environments]
-        if format is not None: body['format'] = format.value
-        if git_source is not None: body['git_source'] = git_source.as_dict()
-        if health is not None: body['health'] = health.as_dict()
-        if job_clusters is not None: body['job_clusters'] = [v.as_dict() for v in job_clusters]
-        if max_concurrent_runs is not None: body['max_concurrent_runs'] = max_concurrent_runs
-        if name is not None: body['name'] = name
-        if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
-        if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters]
-        if performance_target is not None: body['performance_target'] = performance_target.value
-        if queue is not None: body['queue'] = queue.as_dict()
-        if run_as is not None: body['run_as'] = run_as.as_dict()
-        if schedule is not None: body['schedule'] = schedule.as_dict()
-        if tags is not None: body['tags'] = tags
-        if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks]
-        if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
-        if trigger is not None: body['trigger'] = trigger.as_dict()
-        if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/jobs/create', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None:
+            body["budget_policy_id"] = budget_policy_id
+        if continuous is not None:
+            body["continuous"] = continuous.as_dict()
+        if deployment is not None:
+            body["deployment"] = deployment.as_dict()
+        if description is not None:
+            body["description"] = description
+        if edit_mode is not None:
+            body["edit_mode"] = edit_mode.value
+        if email_notifications is not None:
+            body["email_notifications"] = email_notifications.as_dict()
+        if environments is not None:
+            body["environments"] = [v.as_dict() for v in environments]
+        if format is not None:
+            body["format"] = format.value
+        if git_source is not None:
+            body["git_source"] = git_source.as_dict()
+        if health is not None:
+            body["health"] = health.as_dict()
+        if job_clusters is not None:
+            body["job_clusters"] = [v.as_dict() for v in job_clusters]
+        if max_concurrent_runs is not None:
+            body["max_concurrent_runs"] = max_concurrent_runs
+        if name is not None:
+            body["name"] = name
+        if notification_settings is not None:
+            body["notification_settings"] = notification_settings.as_dict()
+        if parameters is not None:
+            body["parameters"] = [v.as_dict() for v in parameters]
+        if performance_target is not None:
+            body["performance_target"] = performance_target.value
+        if queue is not None:
+            body["queue"] = queue.as_dict()
+        if run_as is not None:
+            body["run_as"] = run_as.as_dict()
+        if schedule is not None:
+            body["schedule"] = schedule.as_dict()
+        if tags is not None:
+            body["tags"] = tags
+        if tasks is not None:
+            body["tasks"] = [v.as_dict() for v in tasks]
+        if timeout_seconds is not None:
+            body["timeout_seconds"] = timeout_seconds
+        if trigger is not None:
+            body["trigger"] = trigger.as_dict()
+        if webhook_notifications is not None:
+            body["webhook_notifications"] = webhook_notifications.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.1/jobs/create", body=body, headers=headers)
         return CreateResponse.from_dict(res)
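
A hedged sketch of creating a small notebook job, assuming `jobs.Task` and `jobs.NotebookTask` accept the fields shown; the notebook path and cluster ID are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

created = w.jobs.create(
    name="sdk-docs-example",
    max_concurrent_runs=1,
    tasks=[
        jobs.Task(
            task_key="main",
            existing_cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
            notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/example"),
        )
    ],
)
print(created.job_id)
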
 
     def delete(self, job_id: int):
         """Delete a job.
-        
+
         Deletes a job.
-        
+
         :param job_id: int
           The canonical identifier of the job to delete. This field is required.
-        
-        
+
+
         """
         body = {}
-        if job_id is not None: body['job_id'] = job_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if job_id is not None:
+            body["job_id"] = job_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/jobs/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/jobs/delete", body=body, headers=headers)
 
     def delete_run(self, run_id: int):
         """Delete a job run.
-        
+
         Deletes a non-active run. Returns an error if the run is active.
-        
+
         :param run_id: int
           ID of the run to delete.
-        
-        
+
+
         """
         body = {}
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/jobs/runs/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/jobs/runs/delete", body=body, headers=headers)
 
     def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput:
         """Export and retrieve a job run.
-        
+
         Export and retrieve the job run task.
-        
+
         :param run_id: int
           The canonical identifier for the run. This field is required.
         :param views_to_export: :class:`ViewsToExport` (optional)
           Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE.
-        
+
         :returns: :class:`ExportRunOutput`
         """
 
         query = {}
-        if run_id is not None: query['run_id'] = run_id
-        if views_to_export is not None: query['views_to_export'] = views_to_export.value
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
+        if run_id is not None:
+            query["run_id"] = run_id
+        if views_to_export is not None:
+            query["views_to_export"] = views_to_export.value
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.1/jobs/runs/export", query=query, headers=headers)
         return ExportRunOutput.from_dict(res)
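
A minimal sketch of exporting the code views of a task run; the `ViewsToExport` values come from the docstring above and the run ID is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

exported = w.jobs.export_run(run_id=1234, views_to_export=jobs.ViewsToExport.CODE)
# `exported` is an ExportRunOutput; consult that dataclass for the exported view contents.
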
 
     def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
         """Get a single job.
-        
+
         Retrieves the details for a single job.
-        
+
         In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
         either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
         value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
         be empty on later pages.
-        
+
         :param job_id: int
           The canonical identifier of the job to retrieve information about. This field is required.
         :param page_token: str (optional)
           Use `next_page_token` returned from the previous GetJob to request the next page of the job's
           sub-resources.
-        
+
         :returns: :class:`Job`
         """
 
         query = {}
-        if job_id is not None: query['job_id'] = job_id
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
+        if job_id is not None:
+            query["job_id"] = job_id
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.1/jobs/get", query=query, headers=headers)
         return Job.from_dict(res)
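
A sketch of the pagination pattern the docstring describes, assuming the returned `Job` exposes `next_page_token` and keeps its tasks under `settings.tasks`:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
job_id = 5678  # placeholder

job = w.jobs.get(job_id=job_id)
tasks = list(job.settings.tasks or [])
while job.next_page_token:
    # Later pages only carry the arrays that exceeded 100 elements.
    job = w.jobs.get(job_id=job_id, page_token=job.next_page_token)
    tasks.extend(job.settings.tasks or [])
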
 
     def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse:
         """Get job permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param job_id: str
           The job for which to get or manage permissions.
-        
+
         :returns: :class:`GetJobPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}/permissionLevels', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/jobs/{job_id}/permissionLevels",
+            headers=headers,
+        )
         return GetJobPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, job_id: str) -> JobPermissions:
         """Get job permissions.
-        
+
         Gets the permissions of a job. Jobs can inherit permissions from their root object.
-        
+
         :param job_id: str
           The job for which to get or manage permissions.
-        
+
         :returns: :class:`JobPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/jobs/{job_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/permissions/jobs/{job_id}", headers=headers)
         return JobPermissions.from_dict(res)
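
A short sketch of the two permission reads above; note that both take the job ID as a string:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
job_id = "5678"  # placeholder: permissions endpoints take the ID as a string

levels = w.jobs.get_permission_levels(job_id=job_id)
current = w.jobs.get_permissions(job_id=job_id)
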
 
-    def get_run(self,
-                run_id: int,
-                *,
-                include_history: Optional[bool] = None,
-                include_resolved_values: Optional[bool] = None,
-                page_token: Optional[str] = None) -> Run:
+    def get_run(
+        self,
+        run_id: int,
+        *,
+        include_history: Optional[bool] = None,
+        include_resolved_values: Optional[bool] = None,
+        page_token: Optional[str] = None,
+    ) -> Run:
         """Get a single job run.
-        
+
         Retrieves the metadata of a run.
-        
+
         In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when
         either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
         value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
         be empty on later pages.
-        
+
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
         :param include_history: bool (optional)
@@ -7495,56 +8873,72 @@ def get_run(self,
         :param page_token: str (optional)
           Use `next_page_token` returned from the previous GetRun to request the next page of the run's
           sub-resources.
-        
+
         :returns: :class:`Run`
         """
 
         query = {}
-        if include_history is not None: query['include_history'] = include_history
-        if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values
-        if page_token is not None: query['page_token'] = page_token
-        if run_id is not None: query['run_id'] = run_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.1/jobs/runs/get', query=query, headers=headers)
+        if include_history is not None:
+            query["include_history"] = include_history
+        if include_resolved_values is not None:
+            query["include_resolved_values"] = include_resolved_values
+        if page_token is not None:
+            query["page_token"] = page_token
+        if run_id is not None:
+            query["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.1/jobs/runs/get", query=query, headers=headers)
         return Run.from_dict(res)
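
A sketch of fetching run metadata with resolved parameter values; the run ID is a placeholder and the printed attributes are assumed from the `Run` dataclass defined earlier in this file:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

run = w.jobs.get_run(run_id=1234, include_resolved_values=True)
print(run.state.life_cycle_state, run.run_page_url)
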
 
     def get_run_output(self, run_id: int) -> RunOutput:
         """Get the output for a single run.
-        
+
         Retrieve the output and metadata of a single task run. When a notebook task returns a value through
         the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
         restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
         job results in a cloud storage service.
-        
+
         This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if
         the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to
         reference them beyond 60 days, you must save old run results before they expire.
-        
+
         :param run_id: int
           The canonical identifier for the run.
-        
+
         :returns: :class:`RunOutput`
         """
 
         query = {}
-        if run_id is not None: query['run_id'] = run_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.1/jobs/runs/get-output', query=query, headers=headers)
+        if run_id is not None:
+            query["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.1/jobs/runs/get-output",
+            query=query,
+            headers=headers,
+        )
         return RunOutput.from_dict(res)
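
A sketch of reading a notebook task's exit value, assuming `RunOutput` exposes it as `notebook_output.result` (an assumption; check the `RunOutput` dataclass above); the run ID is a placeholder:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

output = w.jobs.get_run_output(run_id=1234)  # placeholder: ID of a *task* run
if output.notebook_output:  # assumption: populated for notebook tasks
    print(output.notebook_output.result)
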
 
-    def list(self,
-             *,
-             expand_tasks: Optional[bool] = None,
-             limit: Optional[int] = None,
-             name: Optional[str] = None,
-             offset: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[BaseJob]:
+    def list(
+        self,
+        *,
+        expand_tasks: Optional[bool] = None,
+        limit: Optional[int] = None,
+        name: Optional[str] = None,
+        offset: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[BaseJob]:
         """List jobs.
-        
+
         Retrieves a list of jobs.
-        
+
         :param expand_tasks: bool (optional)
           Whether to include task and cluster details in the response. Note that in API 2.2, only the first
           100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
@@ -7559,43 +8953,52 @@ def list(self,
         :param page_token: str (optional)
           Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
           previous page of jobs respectively.
-        
+
         :returns: Iterator over :class:`BaseJob`
         """
 
         query = {}
-        if expand_tasks is not None: query['expand_tasks'] = expand_tasks
-        if limit is not None: query['limit'] = limit
-        if name is not None: query['name'] = name
-        if offset is not None: query['offset'] = offset
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if expand_tasks is not None:
+            query["expand_tasks"] = expand_tasks
+        if limit is not None:
+            query["limit"] = limit
+        if name is not None:
+            query["name"] = name
+        if offset is not None:
+            query["offset"] = offset
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/jobs/list', query=query, headers=headers)
-            if 'jobs' in json:
-                for v in json['jobs']:
+            json = self._api.do("GET", "/api/2.1/jobs/list", query=query, headers=headers)
+            if "jobs" in json:
+                for v in json["jobs"]:
                     yield BaseJob.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_runs(self,
-                  *,
-                  active_only: Optional[bool] = None,
-                  completed_only: Optional[bool] = None,
-                  expand_tasks: Optional[bool] = None,
-                  job_id: Optional[int] = None,
-                  limit: Optional[int] = None,
-                  offset: Optional[int] = None,
-                  page_token: Optional[str] = None,
-                  run_type: Optional[RunType] = None,
-                  start_time_from: Optional[int] = None,
-                  start_time_to: Optional[int] = None) -> Iterator[BaseRun]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_runs(
+        self,
+        *,
+        active_only: Optional[bool] = None,
+        completed_only: Optional[bool] = None,
+        expand_tasks: Optional[bool] = None,
+        job_id: Optional[int] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        page_token: Optional[str] = None,
+        run_type: Optional[RunType] = None,
+        start_time_from: Optional[int] = None,
+        start_time_to: Optional[int] = None,
+    ) -> Iterator[BaseRun]:
         """List job runs.
-        
+
         List runs in descending order by start time.
-        
+
         :param active_only: bool (optional)
           If active_only is `true`, only active runs are included in the results; otherwise, lists both active
           and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING` state.
@@ -7625,53 +9028,67 @@ def list_runs(self,
         :param start_time_to: int (optional)
           Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds.
           Can be combined with _start_time_from_ to filter by a time range.
-        
+
         :returns: Iterator over :class:`BaseRun`
         """
 
         query = {}
-        if active_only is not None: query['active_only'] = active_only
-        if completed_only is not None: query['completed_only'] = completed_only
-        if expand_tasks is not None: query['expand_tasks'] = expand_tasks
-        if job_id is not None: query['job_id'] = job_id
-        if limit is not None: query['limit'] = limit
-        if offset is not None: query['offset'] = offset
-        if page_token is not None: query['page_token'] = page_token
-        if run_type is not None: query['run_type'] = run_type.value
-        if start_time_from is not None: query['start_time_from'] = start_time_from
-        if start_time_to is not None: query['start_time_to'] = start_time_to
-        headers = {'Accept': 'application/json', }
+        if active_only is not None:
+            query["active_only"] = active_only
+        if completed_only is not None:
+            query["completed_only"] = completed_only
+        if expand_tasks is not None:
+            query["expand_tasks"] = expand_tasks
+        if job_id is not None:
+            query["job_id"] = job_id
+        if limit is not None:
+            query["limit"] = limit
+        if offset is not None:
+            query["offset"] = offset
+        if page_token is not None:
+            query["page_token"] = page_token
+        if run_type is not None:
+            query["run_type"] = run_type.value
+        if start_time_from is not None:
+            query["start_time_from"] = start_time_from
+        if start_time_to is not None:
+            query["start_time_to"] = start_time_to
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/jobs/runs/list', query=query, headers=headers)
-            if 'runs' in json:
-                for v in json['runs']:
+            json = self._api.do("GET", "/api/2.1/jobs/runs/list", query=query, headers=headers)
+            if "runs" in json:
+                for v in json["runs"]:
                     yield BaseRun.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def repair_run(self,
-                   run_id: int,
-                   *,
-                   dbt_commands: Optional[List[str]] = None,
-                   jar_params: Optional[List[str]] = None,
-                   job_parameters: Optional[Dict[str, str]] = None,
-                   latest_repair_id: Optional[int] = None,
-                   notebook_params: Optional[Dict[str, str]] = None,
-                   pipeline_params: Optional[PipelineParams] = None,
-                   python_named_params: Optional[Dict[str, str]] = None,
-                   python_params: Optional[List[str]] = None,
-                   rerun_all_failed_tasks: Optional[bool] = None,
-                   rerun_dependent_tasks: Optional[bool] = None,
-                   rerun_tasks: Optional[List[str]] = None,
-                   spark_submit_params: Optional[List[str]] = None,
-                   sql_params: Optional[Dict[str, str]] = None) -> Wait[Run]:
+            query["page_token"] = json["next_page_token"]
+
+    def repair_run(
+        self,
+        run_id: int,
+        *,
+        dbt_commands: Optional[List[str]] = None,
+        jar_params: Optional[List[str]] = None,
+        job_parameters: Optional[Dict[str, str]] = None,
+        latest_repair_id: Optional[int] = None,
+        notebook_params: Optional[Dict[str, str]] = None,
+        pipeline_params: Optional[PipelineParams] = None,
+        python_named_params: Optional[Dict[str, str]] = None,
+        python_params: Optional[List[str]] = None,
+        rerun_all_failed_tasks: Optional[bool] = None,
+        rerun_dependent_tasks: Optional[bool] = None,
+        rerun_tasks: Optional[List[str]] = None,
+        spark_submit_params: Optional[List[str]] = None,
+        sql_params: Optional[Dict[str, str]] = None,
+    ) -> Wait[Run]:
         """Repair a job run.
-        
+
         Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
         and task settings, and can be viewed in the history for the original job run.
-        
+
         :param run_id: int
           The job run ID of the run to repair. The run must not be in progress.
         :param dbt_commands: List[str] (optional)
@@ -7683,9 +9100,9 @@ def repair_run(self,
           task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
@@ -7696,16 +9113,16 @@ def repair_run(self,
           A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
           "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
           [dbutils.widgets.get] function.
-          
+
           If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-          
+
           notebook_params cannot be specified in conjunction with jar_params.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           The JSON representation of this field (for example `{"notebook_params":{"name":"john
           doe","age":"35"}}`) cannot exceed 10,000 bytes.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
@@ -7716,15 +9133,15 @@ def repair_run(self,
           The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
           would overwrite the parameters specified in job setting. The JSON representation of this field (for
           example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           Important
-          
+
           These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
           returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
           emojis.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param rerun_all_failed_tasks: bool (optional)
           If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
@@ -7739,45 +9156,64 @@ def repair_run(self,
           as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
           in job setting. The JSON representation of this field (for example `{"python_params":["john
           doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs
-          
+
           Important
-          
+
           These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
           returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
           emojis.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param sql_params: Dict[str,str] (optional)
           A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
           "age": "35"}`. The SQL alert task does not support custom parameters.
-        
+
         :returns:
           Long-running operation waiter for :class:`Run`.
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         """
         body = {}
-        if dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands]
-        if jar_params is not None: body['jar_params'] = [v for v in jar_params]
-        if job_parameters is not None: body['job_parameters'] = job_parameters
-        if latest_repair_id is not None: body['latest_repair_id'] = latest_repair_id
-        if notebook_params is not None: body['notebook_params'] = notebook_params
-        if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
-        if python_named_params is not None: body['python_named_params'] = python_named_params
-        if python_params is not None: body['python_params'] = [v for v in python_params]
-        if rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = rerun_all_failed_tasks
-        if rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = rerun_dependent_tasks
-        if rerun_tasks is not None: body['rerun_tasks'] = [v for v in rerun_tasks]
-        if run_id is not None: body['run_id'] = run_id
-        if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
-        if sql_params is not None: body['sql_params'] = sql_params
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/jobs/runs/repair', body=body, headers=headers)
-        return Wait(self.wait_get_run_job_terminated_or_skipped,
-                    response=RepairRunResponse.from_dict(op_response),
-                    run_id=run_id)
+        if dbt_commands is not None:
+            body["dbt_commands"] = [v for v in dbt_commands]
+        if jar_params is not None:
+            body["jar_params"] = [v for v in jar_params]
+        if job_parameters is not None:
+            body["job_parameters"] = job_parameters
+        if latest_repair_id is not None:
+            body["latest_repair_id"] = latest_repair_id
+        if notebook_params is not None:
+            body["notebook_params"] = notebook_params
+        if pipeline_params is not None:
+            body["pipeline_params"] = pipeline_params.as_dict()
+        if python_named_params is not None:
+            body["python_named_params"] = python_named_params
+        if python_params is not None:
+            body["python_params"] = [v for v in python_params]
+        if rerun_all_failed_tasks is not None:
+            body["rerun_all_failed_tasks"] = rerun_all_failed_tasks
+        if rerun_dependent_tasks is not None:
+            body["rerun_dependent_tasks"] = rerun_dependent_tasks
+        if rerun_tasks is not None:
+            body["rerun_tasks"] = [v for v in rerun_tasks]
+        if run_id is not None:
+            body["run_id"] = run_id
+        if spark_submit_params is not None:
+            body["spark_submit_params"] = [v for v in spark_submit_params]
+        if sql_params is not None:
+            body["sql_params"] = sql_params
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/jobs/runs/repair", body=body, headers=headers)
+        return Wait(
+            self.wait_get_run_job_terminated_or_skipped,
+            response=RepairRunResponse.from_dict(op_response),
+            run_id=run_id,
+        )
 
     def repair_run_and_wait(
         self,
@@ -7796,65 +9232,75 @@ def repair_run_and_wait(
         rerun_tasks: Optional[List[str]] = None,
         spark_submit_params: Optional[List[str]] = None,
         sql_params: Optional[Dict[str, str]] = None,
-        timeout=timedelta(minutes=20)) -> Run:
-        return self.repair_run(dbt_commands=dbt_commands,
-                               jar_params=jar_params,
-                               job_parameters=job_parameters,
-                               latest_repair_id=latest_repair_id,
-                               notebook_params=notebook_params,
-                               pipeline_params=pipeline_params,
-                               python_named_params=python_named_params,
-                               python_params=python_params,
-                               rerun_all_failed_tasks=rerun_all_failed_tasks,
-                               rerun_dependent_tasks=rerun_dependent_tasks,
-                               rerun_tasks=rerun_tasks,
-                               run_id=run_id,
-                               spark_submit_params=spark_submit_params,
-                               sql_params=sql_params).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> Run:
+        return self.repair_run(
+            dbt_commands=dbt_commands,
+            jar_params=jar_params,
+            job_parameters=job_parameters,
+            latest_repair_id=latest_repair_id,
+            notebook_params=notebook_params,
+            pipeline_params=pipeline_params,
+            python_named_params=python_named_params,
+            python_params=python_params,
+            rerun_all_failed_tasks=rerun_all_failed_tasks,
+            rerun_dependent_tasks=rerun_dependent_tasks,
+            rerun_tasks=rerun_tasks,
+            run_id=run_id,
+            spark_submit_params=spark_submit_params,
+            sql_params=sql_params,
+        ).result(timeout=timeout)
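
A sketch of repairing a failed run: re-run only the failed tasks (plus their dependents) and block until the repaired run finishes; the run ID is a placeholder:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

repaired = w.jobs.repair_run_and_wait(
    run_id=1234,  # placeholder: a run that is no longer in progress
    rerun_all_failed_tasks=True,
    rerun_dependent_tasks=True,
    timeout=timedelta(minutes=60),
)
print(repaired.state.result_state)
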
 
     def reset(self, job_id: int, new_settings: JobSettings):
         """Update all job settings (reset).
-        
+
         Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
         job settings partially.
-        
+
         :param job_id: int
           The canonical identifier of the job to reset. This field is required.
         :param new_settings: :class:`JobSettings`
           The new settings of the job. These settings completely replace the old settings.
-          
+
           Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other
           fields are applied to future runs only.
-        
-        
+
+
         """
         body = {}
-        if job_id is not None: body['job_id'] = job_id
-        if new_settings is not None: body['new_settings'] = new_settings.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.1/jobs/reset', body=body, headers=headers)
-
-    def run_now(self,
-                job_id: int,
-                *,
-                dbt_commands: Optional[List[str]] = None,
-                idempotency_token: Optional[str] = None,
-                jar_params: Optional[List[str]] = None,
-                job_parameters: Optional[Dict[str, str]] = None,
-                notebook_params: Optional[Dict[str, str]] = None,
-                only: Optional[List[str]] = None,
-                performance_target: Optional[PerformanceTarget] = None,
-                pipeline_params: Optional[PipelineParams] = None,
-                python_named_params: Optional[Dict[str, str]] = None,
-                python_params: Optional[List[str]] = None,
-                queue: Optional[QueueSettings] = None,
-                spark_submit_params: Optional[List[str]] = None,
-                sql_params: Optional[Dict[str, str]] = None) -> Wait[Run]:
+        if job_id is not None:
+            body["job_id"] = job_id
+        if new_settings is not None:
+            body["new_settings"] = new_settings.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.1/jobs/reset", body=body, headers=headers)
+
+    def run_now(
+        self,
+        job_id: int,
+        *,
+        dbt_commands: Optional[List[str]] = None,
+        idempotency_token: Optional[str] = None,
+        jar_params: Optional[List[str]] = None,
+        job_parameters: Optional[Dict[str, str]] = None,
+        notebook_params: Optional[Dict[str, str]] = None,
+        only: Optional[List[str]] = None,
+        performance_target: Optional[PerformanceTarget] = None,
+        pipeline_params: Optional[PipelineParams] = None,
+        python_named_params: Optional[Dict[str, str]] = None,
+        python_params: Optional[List[str]] = None,
+        queue: Optional[QueueSettings] = None,
+        spark_submit_params: Optional[List[str]] = None,
+        sql_params: Optional[Dict[str, str]] = None,
+    ) -> Wait[Run]:
         """Trigger a new job run.
-        
+
         Run a job and return the `run_id` of the triggered run.
-        
+
         :param job_id: int
           The ID of the job to be executed
         :param dbt_commands: List[str] (optional)
@@ -7864,14 +9310,14 @@ def run_now(self,
           An optional token to guarantee the idempotency of job run requests. If a run with the provided token
           already exists, the request does not create a new run but returns the ID of the existing run
           instead. If a run with the provided token is deleted, an error is returned.
-          
+
           If you specify the idempotency token, upon failure you can retry until the request succeeds.
           Databricks guarantees that exactly one run is launched with that idempotency token.
-          
+
           This token must have at most 64 characters.
-          
+
           For more information, see [How to ensure idempotency for jobs].
-          
+
           [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
         :param jar_params: List[str] (optional)
           A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
@@ -7879,9 +9325,9 @@ def run_now(self,
           task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
@@ -7889,16 +9335,16 @@ def run_now(self,
           A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
           "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
           [dbutils.widgets.get] function.
-          
+
           If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-          
+
           notebook_params cannot be specified in conjunction with jar_params.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           The JSON representation of this field (for example `{"notebook_params":{"name":"john
           doe","age":"35"}}`) cannot exceed 10,000 bytes.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param only: List[str] (optional)
@@ -7916,15 +9362,15 @@ def run_now(self,
           The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
           would overwrite the parameters specified in job setting. The JSON representation of this field (for
           example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs.
-          
+
           Important
-          
+
           These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
           returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
           emojis.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the run.
@@ -7934,124 +9380,158 @@ def run_now(self,
           as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
           in job setting. The JSON representation of this field (for example `{"python_params":["john
           doe","35"]}`) cannot exceed 10,000 bytes.
-          
+
           Use [Task parameter variables] to set parameters containing information about job runs
-          
+
           Important
-          
+
           These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
           returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
           emojis.
-          
+
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param sql_params: Dict[str,str] (optional)
           A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
           "age": "35"}`. The SQL alert task does not support custom parameters.
-        
+
         :returns:
           Long-running operation waiter for :class:`Run`.
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         """
         body = {}
-        if dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands]
-        if idempotency_token is not None: body['idempotency_token'] = idempotency_token
-        if jar_params is not None: body['jar_params'] = [v for v in jar_params]
-        if job_id is not None: body['job_id'] = job_id
-        if job_parameters is not None: body['job_parameters'] = job_parameters
-        if notebook_params is not None: body['notebook_params'] = notebook_params
-        if only is not None: body['only'] = [v for v in only]
-        if performance_target is not None: body['performance_target'] = performance_target.value
-        if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
-        if python_named_params is not None: body['python_named_params'] = python_named_params
-        if python_params is not None: body['python_params'] = [v for v in python_params]
-        if queue is not None: body['queue'] = queue.as_dict()
-        if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params]
-        if sql_params is not None: body['sql_params'] = sql_params
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/jobs/run-now', body=body, headers=headers)
-        return Wait(self.wait_get_run_job_terminated_or_skipped,
-                    response=RunNowResponse.from_dict(op_response),
-                    run_id=op_response['run_id'])
-
-    def run_now_and_wait(self,
-                         job_id: int,
-                         *,
-                         dbt_commands: Optional[List[str]] = None,
-                         idempotency_token: Optional[str] = None,
-                         jar_params: Optional[List[str]] = None,
-                         job_parameters: Optional[Dict[str, str]] = None,
-                         notebook_params: Optional[Dict[str, str]] = None,
-                         only: Optional[List[str]] = None,
-                         performance_target: Optional[PerformanceTarget] = None,
-                         pipeline_params: Optional[PipelineParams] = None,
-                         python_named_params: Optional[Dict[str, str]] = None,
-                         python_params: Optional[List[str]] = None,
-                         queue: Optional[QueueSettings] = None,
-                         spark_submit_params: Optional[List[str]] = None,
-                         sql_params: Optional[Dict[str, str]] = None,
-                         timeout=timedelta(minutes=20)) -> Run:
-        return self.run_now(dbt_commands=dbt_commands,
-                            idempotency_token=idempotency_token,
-                            jar_params=jar_params,
-                            job_id=job_id,
-                            job_parameters=job_parameters,
-                            notebook_params=notebook_params,
-                            only=only,
-                            performance_target=performance_target,
-                            pipeline_params=pipeline_params,
-                            python_named_params=python_named_params,
-                            python_params=python_params,
-                            queue=queue,
-                            spark_submit_params=spark_submit_params,
-                            sql_params=sql_params).result(timeout=timeout)
+        if dbt_commands is not None:
+            body["dbt_commands"] = [v for v in dbt_commands]
+        if idempotency_token is not None:
+            body["idempotency_token"] = idempotency_token
+        if jar_params is not None:
+            body["jar_params"] = [v for v in jar_params]
+        if job_id is not None:
+            body["job_id"] = job_id
+        if job_parameters is not None:
+            body["job_parameters"] = job_parameters
+        if notebook_params is not None:
+            body["notebook_params"] = notebook_params
+        if only is not None:
+            body["only"] = [v for v in only]
+        if performance_target is not None:
+            body["performance_target"] = performance_target.value
+        if pipeline_params is not None:
+            body["pipeline_params"] = pipeline_params.as_dict()
+        if python_named_params is not None:
+            body["python_named_params"] = python_named_params
+        if python_params is not None:
+            body["python_params"] = [v for v in python_params]
+        if queue is not None:
+            body["queue"] = queue.as_dict()
+        if spark_submit_params is not None:
+            body["spark_submit_params"] = [v for v in spark_submit_params]
+        if sql_params is not None:
+            body["sql_params"] = sql_params
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/jobs/run-now", body=body, headers=headers)
+        return Wait(
+            self.wait_get_run_job_terminated_or_skipped,
+            response=RunNowResponse.from_dict(op_response),
+            run_id=op_response["run_id"],
+        )
+
+    def run_now_and_wait(
+        self,
+        job_id: int,
+        *,
+        dbt_commands: Optional[List[str]] = None,
+        idempotency_token: Optional[str] = None,
+        jar_params: Optional[List[str]] = None,
+        job_parameters: Optional[Dict[str, str]] = None,
+        notebook_params: Optional[Dict[str, str]] = None,
+        only: Optional[List[str]] = None,
+        performance_target: Optional[PerformanceTarget] = None,
+        pipeline_params: Optional[PipelineParams] = None,
+        python_named_params: Optional[Dict[str, str]] = None,
+        python_params: Optional[List[str]] = None,
+        queue: Optional[QueueSettings] = None,
+        spark_submit_params: Optional[List[str]] = None,
+        sql_params: Optional[Dict[str, str]] = None,
+        timeout=timedelta(minutes=20),
+    ) -> Run:
+        return self.run_now(
+            dbt_commands=dbt_commands,
+            idempotency_token=idempotency_token,
+            jar_params=jar_params,
+            job_id=job_id,
+            job_parameters=job_parameters,
+            notebook_params=notebook_params,
+            only=only,
+            performance_target=performance_target,
+            pipeline_params=pipeline_params,
+            python_named_params=python_named_params,
+            python_params=python_params,
+            queue=queue,
+            spark_submit_params=spark_submit_params,
+            sql_params=sql_params,
+        ).result(timeout=timeout)
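# Minimal usage sketch for the run_now/run_now_and_wait methods reformatted above; the job ID
# and notebook parameter values are placeholders taken from the docstring example.
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# run_now returns a Wait[Run]; .result() blocks until the run terminates or is skipped.
waiter = w.jobs.run_now(job_id=123, notebook_params={"name": "john doe", "age": "35"})
run = waiter.result(timeout=timedelta(minutes=30))

# Convenience wrapper that triggers and waits with the default 20-minute timeout.
run = w.jobs.run_now_and_wait(job_id=123)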
 
     def set_permissions(
-            self,
-            job_id: str,
-            *,
-            access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
+        self,
+        job_id: str,
+        *,
+        access_control_list: Optional[List[JobAccessControlRequest]] = None,
+    ) -> JobPermissions:
         """Set job permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param job_id: str
           The job for which to get or manage permissions.
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-        
+
         :returns: :class:`JobPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/jobs/{job_id}",
+            body=body,
+            headers=headers,
+        )
         return JobPermissions.from_dict(res)
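# Minimal sketch for set_permissions as reformatted above. The job ID and user name are
# placeholders, and the permission-level enum name (JobPermissionLevel) is an assumption
# based on the SDK's generated naming convention.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

perms = w.jobs.set_permissions(
    job_id="123",  # the permissions endpoints take the job ID as a string
    access_control_list=[
        jobs.JobAccessControlRequest(
            user_name="someone@example.com",
            permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN,
        )
    ],
)
print(perms.as_dict())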
 
-    def submit(self,
-               *,
-               access_control_list: Optional[List[JobAccessControlRequest]] = None,
-               budget_policy_id: Optional[str] = None,
-               email_notifications: Optional[JobEmailNotifications] = None,
-               environments: Optional[List[JobEnvironment]] = None,
-               git_source: Optional[GitSource] = None,
-               health: Optional[JobsHealthRules] = None,
-               idempotency_token: Optional[str] = None,
-               notification_settings: Optional[JobNotificationSettings] = None,
-               queue: Optional[QueueSettings] = None,
-               run_as: Optional[JobRunAs] = None,
-               run_name: Optional[str] = None,
-               tasks: Optional[List[SubmitTask]] = None,
-               timeout_seconds: Optional[int] = None,
-               webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]:
+    def submit(
+        self,
+        *,
+        access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
+        email_notifications: Optional[JobEmailNotifications] = None,
+        environments: Optional[List[JobEnvironment]] = None,
+        git_source: Optional[GitSource] = None,
+        health: Optional[JobsHealthRules] = None,
+        idempotency_token: Optional[str] = None,
+        notification_settings: Optional[JobNotificationSettings] = None,
+        queue: Optional[QueueSettings] = None,
+        run_as: Optional[JobRunAs] = None,
+        run_name: Optional[str] = None,
+        tasks: Optional[List[SubmitTask]] = None,
+        timeout_seconds: Optional[int] = None,
+        webhook_notifications: Optional[WebhookNotifications] = None,
+    ) -> Wait[Run]:
         """Create and trigger a one-time run.
-        
+
         Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
         Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
         run state after the job is submitted.
-        
+
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
         :param budget_policy_id: str (optional)
@@ -8064,10 +9544,10 @@ def submit(self,
         :param git_source: :class:`GitSource` (optional)
           An optional specification for a remote Git repository containing the source code used by tasks.
           Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-          
+
           If `git_source` is set, these tasks retrieve the file from the remote repository by default.
           However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-          
+
           Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
           used, `git_source` must be defined on the job.
         :param health: :class:`JobsHealthRules` (optional)
@@ -8076,14 +9556,14 @@ def submit(self,
           An optional token that can be used to guarantee the idempotency of job run requests. If a run with
           the provided token already exists, the request does not create a new run but returns the ID of the
           existing run instead. If a run with the provided token is deleted, an error is returned.
-          
+
           If you specify the idempotency token, upon failure you can retry until the request succeeds.
           Databricks guarantees that exactly one run is launched with that idempotency token.
-          
+
           This token must have at most 64 characters.
-          
+
           For more information, see [How to ensure idempotency for jobs].
-          
+
           [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
         :param notification_settings: :class:`JobNotificationSettings` (optional)
           Optional notification settings that are used when sending notifications to each of the
@@ -8100,33 +9580,51 @@ def submit(self,
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
         :param webhook_notifications: :class:`WebhookNotifications` (optional)
           A collection of system notification IDs to notify when the run begins or completes.
-        
+
         :returns:
           Long-running operation waiter for :class:`Run`.
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
-        if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
-        if environments is not None: body['environments'] = [v.as_dict() for v in environments]
-        if git_source is not None: body['git_source'] = git_source.as_dict()
-        if health is not None: body['health'] = health.as_dict()
-        if idempotency_token is not None: body['idempotency_token'] = idempotency_token
-        if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
-        if queue is not None: body['queue'] = queue.as_dict()
-        if run_as is not None: body['run_as'] = run_as.as_dict()
-        if run_name is not None: body['run_name'] = run_name
-        if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks]
-        if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds
-        if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.1/jobs/runs/submit', body=body, headers=headers)
-        return Wait(self.wait_get_run_job_terminated_or_skipped,
-                    response=SubmitRunResponse.from_dict(op_response),
-                    run_id=op_response['run_id'])
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None:
+            body["budget_policy_id"] = budget_policy_id
+        if email_notifications is not None:
+            body["email_notifications"] = email_notifications.as_dict()
+        if environments is not None:
+            body["environments"] = [v.as_dict() for v in environments]
+        if git_source is not None:
+            body["git_source"] = git_source.as_dict()
+        if health is not None:
+            body["health"] = health.as_dict()
+        if idempotency_token is not None:
+            body["idempotency_token"] = idempotency_token
+        if notification_settings is not None:
+            body["notification_settings"] = notification_settings.as_dict()
+        if queue is not None:
+            body["queue"] = queue.as_dict()
+        if run_as is not None:
+            body["run_as"] = run_as.as_dict()
+        if run_name is not None:
+            body["run_name"] = run_name
+        if tasks is not None:
+            body["tasks"] = [v.as_dict() for v in tasks]
+        if timeout_seconds is not None:
+            body["timeout_seconds"] = timeout_seconds
+        if webhook_notifications is not None:
+            body["webhook_notifications"] = webhook_notifications.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.1/jobs/runs/submit", body=body, headers=headers)
+        return Wait(
+            self.wait_get_run_job_terminated_or_skipped,
+            response=SubmitRunResponse.from_dict(op_response),
+            run_id=op_response["run_id"],
+        )
 
     def submit_and_wait(
         self,
@@ -8145,32 +9643,37 @@ def submit_and_wait(
         tasks: Optional[List[SubmitTask]] = None,
         timeout_seconds: Optional[int] = None,
         webhook_notifications: Optional[WebhookNotifications] = None,
-        timeout=timedelta(minutes=20)) -> Run:
-        return self.submit(access_control_list=access_control_list,
-                           budget_policy_id=budget_policy_id,
-                           email_notifications=email_notifications,
-                           environments=environments,
-                           git_source=git_source,
-                           health=health,
-                           idempotency_token=idempotency_token,
-                           notification_settings=notification_settings,
-                           queue=queue,
-                           run_as=run_as,
-                           run_name=run_name,
-                           tasks=tasks,
-                           timeout_seconds=timeout_seconds,
-                           webhook_notifications=webhook_notifications).result(timeout=timeout)
-
-    def update(self,
-               job_id: int,
-               *,
-               fields_to_remove: Optional[List[str]] = None,
-               new_settings: Optional[JobSettings] = None):
+        timeout=timedelta(minutes=20),
+    ) -> Run:
+        return self.submit(
+            access_control_list=access_control_list,
+            budget_policy_id=budget_policy_id,
+            email_notifications=email_notifications,
+            environments=environments,
+            git_source=git_source,
+            health=health,
+            idempotency_token=idempotency_token,
+            notification_settings=notification_settings,
+            queue=queue,
+            run_as=run_as,
+            run_name=run_name,
+            tasks=tasks,
+            timeout_seconds=timeout_seconds,
+            webhook_notifications=webhook_notifications,
+        ).result(timeout=timeout)
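# Minimal sketch for submit/submit_and_wait as reformatted above: a one-time run with a single
# notebook task on an existing cluster. The run name, cluster ID, and notebook path are
# placeholders for illustration only.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

run = w.jobs.submit_and_wait(
    run_name="one-time-run",
    tasks=[
        jobs.SubmitTask(
            task_key="main",
            existing_cluster_id="1234-567890-abcde123",
            notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/notebook"),
        )
    ],
)
print(run.state)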
+
+    def update(
+        self,
+        job_id: int,
+        *,
+        fields_to_remove: Optional[List[str]] = None,
+        new_settings: Optional[JobSettings] = None,
+    ):
         """Update job settings partially.
-        
+
         Add, update, or remove specific settings of an existing job. Use the [_Reset_
         endpoint](:method:jobs/reset) to overwrite all job settings.
-        
+
         :param job_id: int
           The canonical identifier of the job to update. This field is required.
         :param fields_to_remove: List[str] (optional)
@@ -8178,121 +9681,156 @@ def update(self,
           tasks and job clusters (`tasks/task_1`). This field is optional.
         :param new_settings: :class:`JobSettings` (optional)
           The new settings for the job.
-          
+
           Top-level fields specified in `new_settings` are completely replaced, except for arrays which are
           merged. That is, new and existing entries are completely replaced based on the respective key
           fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept.
-          
+
           Partially updating nested fields is not supported.
-          
+
           Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
           fields are applied to future runs only.
-        
-        
+
+
         """
         body = {}
-        if fields_to_remove is not None: body['fields_to_remove'] = [v for v in fields_to_remove]
-        if job_id is not None: body['job_id'] = job_id
-        if new_settings is not None: body['new_settings'] = new_settings.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if fields_to_remove is not None:
+            body["fields_to_remove"] = [v for v in fields_to_remove]
+        if job_id is not None:
+            body["job_id"] = job_id
+        if new_settings is not None:
+            body["new_settings"] = new_settings.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.1/jobs/update', body=body, headers=headers)
+        self._api.do("POST", "/api/2.1/jobs/update", body=body, headers=headers)
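# Minimal sketch for the partial update above: change only the timeout and remove one task,
# leaving the remaining job settings untouched. The job ID is a placeholder; the
# "tasks/task_1" path mirrors the fields_to_remove example in the docstring.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

w.jobs.update(
    job_id=123,
    new_settings=jobs.JobSettings(timeout_seconds=3600),
    fields_to_remove=["tasks/task_1"],
)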
 
     def update_permissions(
-            self,
-            job_id: str,
-            *,
-            access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
+        self,
+        job_id: str,
+        *,
+        access_control_list: Optional[List[JobAccessControlRequest]] = None,
+    ) -> JobPermissions:
         """Update job permissions.
-        
+
         Updates the permissions on a job. Jobs can inherit permissions from their root object.
-        
+
         :param job_id: str
           The job for which to get or manage permissions.
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-        
+
         :returns: :class:`JobPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/jobs/{job_id}",
+            body=body,
+            headers=headers,
+        )
         return JobPermissions.from_dict(res)
 
 
 class PolicyComplianceForJobsAPI:
     """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
     This API currently only supports compliance controls for cluster policies.
-    
+
     A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
     policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
     edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
     policies.
-    
+
     The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
-    compliance API allows you to update a job so that it becomes compliant with all of its policies."""
+    compliance API allows you to update a job so that it becomes compliant with all of its policies.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def enforce_compliance(self,
-                           job_id: int,
-                           *,
-                           validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse:
+    def enforce_compliance(
+        self, job_id: int, *, validate_only: Optional[bool] = None
+    ) -> EnforcePolicyComplianceResponse:
         """Enforce job policy compliance.
-        
+
         Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
         are compliant with the current versions of their respective cluster policies. All-purpose clusters
         used in the job will not be updated.
-        
+
         :param job_id: int
           The ID of the job you want to enforce policy compliance on.
         :param validate_only: bool (optional)
           If set, previews changes made to the job to comply with its policy, but does not update the job.
-        
+
         :returns: :class:`EnforcePolicyComplianceResponse`
         """
         body = {}
-        if job_id is not None: body['job_id'] = job_id
-        if validate_only is not None: body['validate_only'] = validate_only
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/policies/jobs/enforce-compliance', body=body, headers=headers)
+        if job_id is not None:
+            body["job_id"] = job_id
+        if validate_only is not None:
+            body["validate_only"] = validate_only
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/policies/jobs/enforce-compliance",
+            body=body,
+            headers=headers,
+        )
         return EnforcePolicyComplianceResponse.from_dict(res)
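# Minimal sketch for enforce_compliance as reformatted above: preview the changes with
# validate_only=True, then apply them. The job ID is a placeholder, and exposing the API as
# w.policy_compliance_for_jobs is assumed from the SDK's workspace-client layout.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

preview = w.policy_compliance_for_jobs.enforce_compliance(job_id=123, validate_only=True)
print(preview.as_dict())

# Apply the changes once the preview looks right.
w.policy_compliance_for_jobs.enforce_compliance(job_id=123)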
 
     def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse:
         """Get job policy compliance.
-        
+
         Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
         they use was updated after the job was last edited and some of its job clusters no longer comply with
         their updated policies.
-        
+
         :param job_id: int
           The ID of the job whose compliance status you are requesting.
-        
+
         :returns: :class:`GetPolicyComplianceResponse`
         """
 
         query = {}
-        if job_id is not None: query['job_id'] = job_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/policies/jobs/get-compliance', query=query, headers=headers)
+        if job_id is not None:
+            query["job_id"] = job_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/policies/jobs/get-compliance",
+            query=query,
+            headers=headers,
+        )
         return GetPolicyComplianceResponse.from_dict(res)
 
-    def list_compliance(self,
-                        policy_id: str,
-                        *,
-                        page_size: Optional[int] = None,
-                        page_token: Optional[str] = None) -> Iterator[JobCompliance]:
+    def list_compliance(
+        self,
+        policy_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[JobCompliance]:
         """List job policy compliance.
-        
+
         Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
         compliance if a cluster policy they use was updated after the job was last edited and its job clusters
         no longer comply with the updated policy.
-        
+
         :param policy_id: str
           Canonical unique identifier for the cluster policy.
         :param page_size: int (optional)
@@ -8301,21 +9839,31 @@ def list_compliance(self,
         :param page_token: str (optional)
           A page token that can be used to navigate to the next page or previous page as returned by
           `next_page_token` or `prev_page_token`.
-        
+
         :returns: Iterator over :class:`JobCompliance`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/policies/jobs/list-compliance', query=query, headers=headers)
-            if 'jobs' in json:
-                for v in json['jobs']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/policies/jobs/list-compliance",
+                query=query,
+                headers=headers,
+            )
+            if "jobs" in json:
+                for v in json["jobs"]:
                     yield JobCompliance.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
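# Minimal sketch for the paginated list_compliance iterator above: the generator follows
# next_page_token internally, so callers simply iterate. The policy ID is a placeholder, and
# the client attribute name (policy_compliance_for_jobs) is assumed from the SDK layout.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for compliance in w.policy_compliance_for_jobs.list_compliance(policy_id="ABC123", page_size=50):
    print(compliance.as_dict())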
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 239cd2eaf..a8ae22e19 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -23,21 +23,28 @@ class AddExchangeForListingRequest:
     def as_dict(self) -> dict:
         """Serializes the AddExchangeForListingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingRequest:
         """Deserializes the AddExchangeForListingRequest from a dictionary."""
-        return cls(exchange_id=d.get('exchange_id', None), listing_id=d.get('listing_id', None))
+        return cls(
+            exchange_id=d.get("exchange_id", None),
+            listing_id=d.get("listing_id", None),
+        )
 
 
 @dataclass
@@ -47,29 +54,31 @@ class AddExchangeForListingResponse:
     def as_dict(self) -> dict:
         """Serializes the AddExchangeForListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict()
+        if self.exchange_for_listing:
+            body["exchange_for_listing"] = self.exchange_for_listing.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing
+        if self.exchange_for_listing:
+            body["exchange_for_listing"] = self.exchange_for_listing
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingResponse:
         """Deserializes the AddExchangeForListingResponse from a dictionary."""
-        return cls(exchange_for_listing=_from_dict(d, 'exchange_for_listing', ExchangeListing))
+        return cls(exchange_for_listing=_from_dict(d, "exchange_for_listing", ExchangeListing))
 
 
 class AssetType(Enum):
 
-    ASSET_TYPE_DATA_TABLE = 'ASSET_TYPE_DATA_TABLE'
-    ASSET_TYPE_GIT_REPO = 'ASSET_TYPE_GIT_REPO'
-    ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA'
-    ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL'
-    ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK'
-    ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION'
+    ASSET_TYPE_DATA_TABLE = "ASSET_TYPE_DATA_TABLE"
+    ASSET_TYPE_GIT_REPO = "ASSET_TYPE_GIT_REPO"
+    ASSET_TYPE_MEDIA = "ASSET_TYPE_MEDIA"
+    ASSET_TYPE_MODEL = "ASSET_TYPE_MODEL"
+    ASSET_TYPE_NOTEBOOK = "ASSET_TYPE_NOTEBOOK"
+    ASSET_TYPE_PARTNER_INTEGRATION = "ASSET_TYPE_PARTNER_INTEGRATION"
 
 
 @dataclass
@@ -79,19 +88,21 @@ class BatchGetListingsResponse:
     def as_dict(self) -> dict:
         """Serializes the BatchGetListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
+        if self.listings:
+            body["listings"] = self.listings
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetListingsResponse:
         """Deserializes the BatchGetListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing))
+        return cls(listings=_repeated_dict(d, "listings", Listing))
 
 
 @dataclass
@@ -101,45 +112,47 @@ class BatchGetProvidersResponse:
     def as_dict(self) -> dict:
         """Serializes the BatchGetProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
+        if self.providers:
+            body["providers"] = [v.as_dict() for v in self.providers]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.providers: body['providers'] = self.providers
+        if self.providers:
+            body["providers"] = self.providers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetProvidersResponse:
         """Deserializes the BatchGetProvidersResponse from a dictionary."""
-        return cls(providers=_repeated_dict(d, 'providers', ProviderInfo))
+        return cls(providers=_repeated_dict(d, "providers", ProviderInfo))
 
 
 class Category(Enum):
 
-    ADVERTISING_AND_MARKETING = 'ADVERTISING_AND_MARKETING'
-    CLIMATE_AND_ENVIRONMENT = 'CLIMATE_AND_ENVIRONMENT'
-    COMMERCE = 'COMMERCE'
-    DEMOGRAPHICS = 'DEMOGRAPHICS'
-    ECONOMICS = 'ECONOMICS'
-    EDUCATION = 'EDUCATION'
-    ENERGY = 'ENERGY'
-    FINANCIAL = 'FINANCIAL'
-    GAMING = 'GAMING'
-    GEOSPATIAL = 'GEOSPATIAL'
-    HEALTH = 'HEALTH'
-    LOOKUP_TABLES = 'LOOKUP_TABLES'
-    MANUFACTURING = 'MANUFACTURING'
-    MEDIA = 'MEDIA'
-    OTHER = 'OTHER'
-    PUBLIC_SECTOR = 'PUBLIC_SECTOR'
-    RETAIL = 'RETAIL'
-    SCIENCE_AND_RESEARCH = 'SCIENCE_AND_RESEARCH'
-    SECURITY = 'SECURITY'
-    SPORTS = 'SPORTS'
-    TRANSPORTATION_AND_LOGISTICS = 'TRANSPORTATION_AND_LOGISTICS'
-    TRAVEL_AND_TOURISM = 'TRAVEL_AND_TOURISM'
+    ADVERTISING_AND_MARKETING = "ADVERTISING_AND_MARKETING"
+    CLIMATE_AND_ENVIRONMENT = "CLIMATE_AND_ENVIRONMENT"
+    COMMERCE = "COMMERCE"
+    DEMOGRAPHICS = "DEMOGRAPHICS"
+    ECONOMICS = "ECONOMICS"
+    EDUCATION = "EDUCATION"
+    ENERGY = "ENERGY"
+    FINANCIAL = "FINANCIAL"
+    GAMING = "GAMING"
+    GEOSPATIAL = "GEOSPATIAL"
+    HEALTH = "HEALTH"
+    LOOKUP_TABLES = "LOOKUP_TABLES"
+    MANUFACTURING = "MANUFACTURING"
+    MEDIA = "MEDIA"
+    OTHER = "OTHER"
+    PUBLIC_SECTOR = "PUBLIC_SECTOR"
+    RETAIL = "RETAIL"
+    SCIENCE_AND_RESEARCH = "SCIENCE_AND_RESEARCH"
+    SECURITY = "SECURITY"
+    SPORTS = "SPORTS"
+    TRANSPORTATION_AND_LOGISTICS = "TRANSPORTATION_AND_LOGISTICS"
+    TRAVEL_AND_TOURISM = "TRAVEL_AND_TOURISM"
 
 
 @dataclass
@@ -149,19 +162,21 @@ class ConsumerTerms:
     def as_dict(self) -> dict:
         """Serializes the ConsumerTerms into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConsumerTerms:
         """Deserializes the ConsumerTerms from a dictionary."""
-        return cls(version=d.get('version', None))
+        return cls(version=d.get("version", None))
 
 
 @dataclass
@@ -179,34 +194,44 @@ class ContactInfo:
     def as_dict(self) -> dict:
         """Serializes the ContactInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.company is not None: body['company'] = self.company
-        if self.email is not None: body['email'] = self.email
-        if self.first_name is not None: body['first_name'] = self.first_name
-        if self.last_name is not None: body['last_name'] = self.last_name
+        if self.company is not None:
+            body["company"] = self.company
+        if self.email is not None:
+            body["email"] = self.email
+        if self.first_name is not None:
+            body["first_name"] = self.first_name
+        if self.last_name is not None:
+            body["last_name"] = self.last_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ContactInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.company is not None: body['company'] = self.company
-        if self.email is not None: body['email'] = self.email
-        if self.first_name is not None: body['first_name'] = self.first_name
-        if self.last_name is not None: body['last_name'] = self.last_name
+        if self.company is not None:
+            body["company"] = self.company
+        if self.email is not None:
+            body["email"] = self.email
+        if self.first_name is not None:
+            body["first_name"] = self.first_name
+        if self.last_name is not None:
+            body["last_name"] = self.last_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContactInfo:
         """Deserializes the ContactInfo from a dictionary."""
-        return cls(company=d.get('company', None),
-                   email=d.get('email', None),
-                   first_name=d.get('first_name', None),
-                   last_name=d.get('last_name', None))
+        return cls(
+            company=d.get("company", None),
+            email=d.get("email", None),
+            first_name=d.get("first_name", None),
+            last_name=d.get("last_name", None),
+        )
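# Minimal sketch of the as_dict/from_dict round trip shared by the generated dataclasses above,
# using ContactInfo. Field values are placeholders, and the constructor is assumed to accept the
# fields as optional keyword arguments defaulting to None.
from databricks.sdk.service import marketplace

info = marketplace.ContactInfo(company="Acme", email="jane@example.com", first_name="Jane")
body = info.as_dict()  # only fields that are set are serialized, per the as_dict pattern above
assert "last_name" not in body
restored = marketplace.ContactInfo.from_dict(body)
print(restored)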
 
 
 class Cost(Enum):
 
-    FREE = 'FREE'
-    PAID = 'PAID'
+    FREE = "FREE"
+    PAID = "PAID"
 
 
 @dataclass
@@ -216,19 +241,21 @@ class CreateExchangeFilterRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateExchangeFilterRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter: body['filter'] = self.filter.as_dict()
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter: body['filter'] = self.filter
+        if self.filter:
+            body["filter"] = self.filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterRequest:
         """Deserializes the CreateExchangeFilterRequest from a dictionary."""
-        return cls(filter=_from_dict(d, 'filter', ExchangeFilter))
+        return cls(filter=_from_dict(d, "filter", ExchangeFilter))
 
 
 @dataclass
@@ -238,19 +265,21 @@ class CreateExchangeFilterResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateExchangeFilterResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter_id is not None: body['filter_id'] = self.filter_id
+        if self.filter_id is not None:
+            body["filter_id"] = self.filter_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter_id is not None: body['filter_id'] = self.filter_id
+        if self.filter_id is not None:
+            body["filter_id"] = self.filter_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterResponse:
         """Deserializes the CreateExchangeFilterResponse from a dictionary."""
-        return cls(filter_id=d.get('filter_id', None))
+        return cls(filter_id=d.get("filter_id", None))
 
 
 @dataclass
@@ -260,19 +289,21 @@ class CreateExchangeRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateExchangeRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange.as_dict()
+        if self.exchange:
+            body["exchange"] = self.exchange.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange
+        if self.exchange:
+            body["exchange"] = self.exchange
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeRequest:
         """Deserializes the CreateExchangeRequest from a dictionary."""
-        return cls(exchange=_from_dict(d, 'exchange', Exchange))
+        return cls(exchange=_from_dict(d, "exchange", Exchange))
 
 
 @dataclass
@@ -282,19 +313,21 @@ class CreateExchangeResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeResponse:
         """Deserializes the CreateExchangeResponse from a dictionary."""
-        return cls(exchange_id=d.get('exchange_id', None))
+        return cls(exchange_id=d.get("exchange_id", None))
 
 
 @dataclass
@@ -310,29 +343,38 @@ class CreateFileRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateFileRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.file_parent: body['file_parent'] = self.file_parent.as_dict()
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.file_parent:
+            body["file_parent"] = self.file_parent.as_dict()
         if self.marketplace_file_type is not None:
-            body['marketplace_file_type'] = self.marketplace_file_type.value
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
+            body["marketplace_file_type"] = self.marketplace_file_type.value
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.file_parent: body['file_parent'] = self.file_parent
-        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.file_parent:
+            body["file_parent"] = self.file_parent
+        if self.marketplace_file_type is not None:
+            body["marketplace_file_type"] = self.marketplace_file_type
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileRequest:
         """Deserializes the CreateFileRequest from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   file_parent=_from_dict(d, 'file_parent', FileParent),
-                   marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType),
-                   mime_type=d.get('mime_type', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            file_parent=_from_dict(d, "file_parent", FileParent),
+            marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType),
+            mime_type=d.get("mime_type", None),
+        )
 
 
 @dataclass
@@ -345,21 +387,28 @@ class CreateFileResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateFileResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info.as_dict()
-        if self.upload_url is not None: body['upload_url'] = self.upload_url
+        if self.file_info:
+            body["file_info"] = self.file_info.as_dict()
+        if self.upload_url is not None:
+            body["upload_url"] = self.upload_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info
-        if self.upload_url is not None: body['upload_url'] = self.upload_url
+        if self.file_info:
+            body["file_info"] = self.file_info
+        if self.upload_url is not None:
+            body["upload_url"] = self.upload_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileResponse:
         """Deserializes the CreateFileResponse from a dictionary."""
-        return cls(file_info=_from_dict(d, 'file_info', FileInfo), upload_url=d.get('upload_url', None))
+        return cls(
+            file_info=_from_dict(d, "file_info", FileInfo),
+            upload_url=d.get("upload_url", None),
+        )
 
 
 @dataclass
@@ -381,34 +430,47 @@ def as_dict(self) -> dict:
         """Serializes the CreateInstallationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.accepted_consumer_terms:
-            body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict()
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.repo_detail: body['repo_detail'] = self.repo_detail.as_dict()
-        if self.share_name is not None: body['share_name'] = self.share_name
+            body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict()
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.repo_detail:
+            body["repo_detail"] = self.repo_detail.as_dict()
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.repo_detail: body['repo_detail'] = self.repo_detail
-        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.accepted_consumer_terms:
+            body["accepted_consumer_terms"] = self.accepted_consumer_terms
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
+        if self.repo_detail:
+            body["repo_detail"] = self.repo_detail
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstallationRequest:
         """Deserializes the CreateInstallationRequest from a dictionary."""
-        return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms),
-                   catalog_name=d.get('catalog_name', None),
-                   listing_id=d.get('listing_id', None),
-                   recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType),
-                   repo_detail=_from_dict(d, 'repo_detail', RepoInstallation),
-                   share_name=d.get('share_name', None))
+        return cls(
+            accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms),
+            catalog_name=d.get("catalog_name", None),
+            listing_id=d.get("listing_id", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+            repo_detail=_from_dict(d, "repo_detail", RepoInstallation),
+            share_name=d.get("share_name", None),
+        )
 
 
 @dataclass
@@ -418,19 +480,21 @@ class CreateListingRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateListingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing: body['listing'] = self.listing.as_dict()
+        if self.listing:
+            body["listing"] = self.listing.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing: body['listing'] = self.listing
+        if self.listing:
+            body["listing"] = self.listing
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingRequest:
         """Deserializes the CreateListingRequest from a dictionary."""
-        return cls(listing=_from_dict(d, 'listing', Listing))
+        return cls(listing=_from_dict(d, "listing", Listing))
 
 
 @dataclass
@@ -440,19 +504,21 @@ class CreateListingResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingResponse:
         """Deserializes the CreateListingResponse from a dictionary."""
-        return cls(listing_id=d.get('listing_id', None))
+        return cls(listing_id=d.get("listing_id", None))
 
 
 @dataclass
@@ -481,43 +547,62 @@ def as_dict(self) -> dict:
         """Serializes the CreatePersonalizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.accepted_consumer_terms:
-            body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.company is not None: body['company'] = self.company
-        if self.first_name is not None: body['first_name'] = self.first_name
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.last_name is not None: body['last_name'] = self.last_name
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
+            body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.company is not None:
+            body["company"] = self.company
+        if self.first_name is not None:
+            body["first_name"] = self.first_name
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.last_name is not None:
+            body["last_name"] = self.last_name
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
-        if self.comment is not None: body['comment'] = self.comment
-        if self.company is not None: body['company'] = self.company
-        if self.first_name is not None: body['first_name'] = self.first_name
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.last_name is not None: body['last_name'] = self.last_name
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.accepted_consumer_terms:
+            body["accepted_consumer_terms"] = self.accepted_consumer_terms
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.company is not None:
+            body["company"] = self.company
+        if self.first_name is not None:
+            body["first_name"] = self.first_name
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.last_name is not None:
+            body["last_name"] = self.last_name
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequest:
         """Deserializes the CreatePersonalizationRequest from a dictionary."""
-        return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms),
-                   comment=d.get('comment', None),
-                   company=d.get('company', None),
-                   first_name=d.get('first_name', None),
-                   intended_use=d.get('intended_use', None),
-                   is_from_lighthouse=d.get('is_from_lighthouse', None),
-                   last_name=d.get('last_name', None),
-                   listing_id=d.get('listing_id', None),
-                   recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType))
+        return cls(
+            accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms),
+            comment=d.get("comment", None),
+            company=d.get("company", None),
+            first_name=d.get("first_name", None),
+            intended_use=d.get("intended_use", None),
+            is_from_lighthouse=d.get("is_from_lighthouse", None),
+            last_name=d.get("last_name", None),
+            listing_id=d.get("listing_id", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+        )
 
 
 @dataclass
@@ -527,19 +612,21 @@ class CreatePersonalizationRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the CreatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequestResponse:
         """Deserializes the CreatePersonalizationRequestResponse from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 @dataclass
@@ -549,19 +636,21 @@ class CreateProviderRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateProviderRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.provider: body['provider'] = self.provider.as_dict()
+        if self.provider:
+            body["provider"] = self.provider.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.provider: body['provider'] = self.provider
+        if self.provider:
+            body["provider"] = self.provider
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderRequest:
         """Deserializes the CreateProviderRequest from a dictionary."""
-        return cls(provider=_from_dict(d, 'provider', ProviderInfo))
+        return cls(provider=_from_dict(d, "provider", ProviderInfo))
 
 
 @dataclass
@@ -571,32 +660,34 @@ class CreateProviderResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateProviderResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderResponse:
         """Deserializes the CreateProviderResponse from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 class DataRefresh(Enum):
 
-    DAILY = 'DAILY'
-    HOURLY = 'HOURLY'
-    MINUTE = 'MINUTE'
-    MONTHLY = 'MONTHLY'
-    NONE = 'NONE'
-    QUARTERLY = 'QUARTERLY'
-    SECOND = 'SECOND'
-    WEEKLY = 'WEEKLY'
-    YEARLY = 'YEARLY'
+    DAILY = "DAILY"
+    HOURLY = "HOURLY"
+    MINUTE = "MINUTE"
+    MONTHLY = "MONTHLY"
+    NONE = "NONE"
+    QUARTERLY = "QUARTERLY"
+    SECOND = "SECOND"
+    WEEKLY = "WEEKLY"
+    YEARLY = "YEARLY"
 
 
 @dataclass
@@ -608,21 +699,28 @@ class DataRefreshInfo:
     def as_dict(self) -> dict:
         """Serializes the DataRefreshInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.interval is not None: body['interval'] = self.interval
-        if self.unit is not None: body['unit'] = self.unit.value
+        if self.interval is not None:
+            body["interval"] = self.interval
+        if self.unit is not None:
+            body["unit"] = self.unit.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.interval is not None: body['interval'] = self.interval
-        if self.unit is not None: body['unit'] = self.unit
+        if self.interval is not None:
+            body["interval"] = self.interval
+        if self.unit is not None:
+            body["unit"] = self.unit
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataRefreshInfo:
         """Deserializes the DataRefreshInfo from a dictionary."""
-        return cls(interval=d.get('interval', None), unit=_enum(d, 'unit', DataRefresh))
+        return cls(
+            interval=d.get("interval", None),
+            unit=_enum(d, "unit", DataRefresh),
+        )
 
 
 @dataclass
@@ -741,8 +839,8 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteProviderResponse:
 
 class DeltaSharingRecipientType(Enum):
 
-    DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = 'DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS'
-    DELTA_SHARING_RECIPIENT_TYPE_OPEN = 'DELTA_SHARING_RECIPIENT_TYPE_OPEN'
+    DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = "DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS"
+    DELTA_SHARING_RECIPIENT_TYPE_OPEN = "DELTA_SHARING_RECIPIENT_TYPE_OPEN"
 
 
 @dataclass
@@ -768,43 +866,63 @@ class Exchange:
     def as_dict(self) -> dict:
         """Serializes the Exchange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.filters: body['filters'] = [v.as_dict() for v in self.filters]
-        if self.id is not None: body['id'] = self.id
-        if self.linked_listings: body['linked_listings'] = [v.as_dict() for v in self.linked_listings]
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.filters:
+            body["filters"] = [v.as_dict() for v in self.filters]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.linked_listings:
+            body["linked_listings"] = [v.as_dict() for v in self.linked_listings]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Exchange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.filters: body['filters'] = self.filters
-        if self.id is not None: body['id'] = self.id
-        if self.linked_listings: body['linked_listings'] = self.linked_listings
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.filters:
+            body["filters"] = self.filters
+        if self.id is not None:
+            body["id"] = self.id
+        if self.linked_listings:
+            body["linked_listings"] = self.linked_listings
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Exchange:
         """Deserializes the Exchange from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   filters=_repeated_dict(d, 'filters', ExchangeFilter),
-                   id=d.get('id', None),
-                   linked_listings=_repeated_dict(d, 'linked_listings', ExchangeListing),
-                   name=d.get('name', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            filters=_repeated_dict(d, "filters", ExchangeFilter),
+            id=d.get("id", None),
+            linked_listings=_repeated_dict(d, "linked_listings", ExchangeListing),
+            name=d.get("name", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -830,48 +948,68 @@ class ExchangeFilter:
     def as_dict(self) -> dict:
         """Serializes the ExchangeFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.filter_type is not None: body['filter_type'] = self.filter_type.value
-        if self.filter_value is not None: body['filter_value'] = self.filter_value
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.filter_type is not None:
+            body["filter_type"] = self.filter_type.value
+        if self.filter_value is not None:
+            body["filter_value"] = self.filter_value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.filter_type is not None: body['filter_type'] = self.filter_type
-        if self.filter_value is not None: body['filter_value'] = self.filter_value
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.filter_type is not None:
+            body["filter_type"] = self.filter_type
+        if self.filter_value is not None:
+            body["filter_value"] = self.filter_value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeFilter:
         """Deserializes the ExchangeFilter from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   exchange_id=d.get('exchange_id', None),
-                   filter_type=_enum(d, 'filter_type', ExchangeFilterType),
-                   filter_value=d.get('filter_value', None),
-                   id=d.get('id', None),
-                   name=d.get('name', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            exchange_id=d.get("exchange_id", None),
+            filter_type=_enum(d, "filter_type", ExchangeFilterType),
+            filter_value=d.get("filter_value", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 class ExchangeFilterType(Enum):
 
-    GLOBAL_METASTORE_ID = 'GLOBAL_METASTORE_ID'
+    GLOBAL_METASTORE_ID = "GLOBAL_METASTORE_ID"
 
 
 @dataclass
@@ -893,37 +1031,53 @@ class ExchangeListing:
     def as_dict(self) -> dict:
         """Serializes the ExchangeListing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
-        if self.id is not None: body['id'] = self.id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.exchange_name is not None:
+            body["exchange_name"] = self.exchange_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
-        if self.id is not None: body['id'] = self.id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.exchange_name is not None:
+            body["exchange_name"] = self.exchange_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeListing:
         """Deserializes the ExchangeListing from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   exchange_id=d.get('exchange_id', None),
-                   exchange_name=d.get('exchange_name', None),
-                   id=d.get('id', None),
-                   listing_id=d.get('listing_id', None),
-                   listing_name=d.get('listing_name', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            exchange_id=d.get("exchange_id", None),
+            exchange_name=d.get("exchange_name", None),
+            id=d.get("id", None),
+            listing_id=d.get("listing_id", None),
+            listing_name=d.get("listing_name", None),
+        )
 
 
 @dataclass
@@ -953,47 +1107,68 @@ class FileInfo:
     def as_dict(self) -> dict:
         """Serializes the FileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.download_link is not None: body['download_link'] = self.download_link
-        if self.file_parent: body['file_parent'] = self.file_parent.as_dict()
-        if self.id is not None: body['id'] = self.id
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.download_link is not None:
+            body["download_link"] = self.download_link
+        if self.file_parent:
+            body["file_parent"] = self.file_parent.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
         if self.marketplace_file_type is not None:
-            body['marketplace_file_type'] = self.marketplace_file_type.value
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+            body["marketplace_file_type"] = self.marketplace_file_type.value
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.download_link is not None: body['download_link'] = self.download_link
-        if self.file_parent: body['file_parent'] = self.file_parent
-        if self.id is not None: body['id'] = self.id
-        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.download_link is not None:
+            body["download_link"] = self.download_link
+        if self.file_parent:
+            body["file_parent"] = self.file_parent
+        if self.id is not None:
+            body["id"] = self.id
+        if self.marketplace_file_type is not None:
+            body["marketplace_file_type"] = self.marketplace_file_type
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   display_name=d.get('display_name', None),
-                   download_link=d.get('download_link', None),
-                   file_parent=_from_dict(d, 'file_parent', FileParent),
-                   id=d.get('id', None),
-                   marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType),
-                   mime_type=d.get('mime_type', None),
-                   status=_enum(d, 'status', FileStatus),
-                   status_message=d.get('status_message', None),
-                   updated_at=d.get('updated_at', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            display_name=d.get("display_name", None),
+            download_link=d.get("download_link", None),
+            file_parent=_from_dict(d, "file_parent", FileParent),
+            id=d.get("id", None),
+            marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType),
+            mime_type=d.get("mime_type", None),
+            status=_enum(d, "status", FileStatus),
+            status_message=d.get("status_message", None),
+            updated_at=d.get("updated_at", None),
+        )
 
 
 @dataclass
@@ -1006,42 +1181,48 @@ class FileParent:
     def as_dict(self) -> dict:
         """Serializes the FileParent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type.value
-        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        if self.file_parent_type is not None:
+            body["file_parent_type"] = self.file_parent_type.value
+        if self.parent_id is not None:
+            body["parent_id"] = self.parent_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileParent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type
-        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        if self.file_parent_type is not None:
+            body["file_parent_type"] = self.file_parent_type
+        if self.parent_id is not None:
+            body["parent_id"] = self.parent_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileParent:
         """Deserializes the FileParent from a dictionary."""
-        return cls(file_parent_type=_enum(d, 'file_parent_type', FileParentType),
-                   parent_id=d.get('parent_id', None))
+        return cls(
+            file_parent_type=_enum(d, "file_parent_type", FileParentType),
+            parent_id=d.get("parent_id", None),
+        )
 
 
 class FileParentType(Enum):
 
-    LISTING = 'LISTING'
-    PROVIDER = 'PROVIDER'
+    LISTING = "LISTING"
+    PROVIDER = "PROVIDER"
 
 
 class FileStatus(Enum):
 
-    FILE_STATUS_PUBLISHED = 'FILE_STATUS_PUBLISHED'
-    FILE_STATUS_SANITIZATION_FAILED = 'FILE_STATUS_SANITIZATION_FAILED'
-    FILE_STATUS_SANITIZING = 'FILE_STATUS_SANITIZING'
-    FILE_STATUS_STAGING = 'FILE_STATUS_STAGING'
+    FILE_STATUS_PUBLISHED = "FILE_STATUS_PUBLISHED"
+    FILE_STATUS_SANITIZATION_FAILED = "FILE_STATUS_SANITIZATION_FAILED"
+    FILE_STATUS_SANITIZING = "FILE_STATUS_SANITIZING"
+    FILE_STATUS_STAGING = "FILE_STATUS_STAGING"
 
 
 class FulfillmentType(Enum):
 
-    INSTALL = 'INSTALL'
-    REQUEST_ACCESS = 'REQUEST_ACCESS'
+    INSTALL = "INSTALL"
+    REQUEST_ACCESS = "REQUEST_ACCESS"
 
 
 @dataclass
@@ -1051,19 +1232,21 @@ class GetExchangeResponse:
     def as_dict(self) -> dict:
         """Serializes the GetExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange.as_dict()
+        if self.exchange:
+            body["exchange"] = self.exchange.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange
+        if self.exchange:
+            body["exchange"] = self.exchange
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExchangeResponse:
         """Deserializes the GetExchangeResponse from a dictionary."""
-        return cls(exchange=_from_dict(d, 'exchange', Exchange))
+        return cls(exchange=_from_dict(d, "exchange", Exchange))
 
 
 @dataclass
@@ -1073,19 +1256,21 @@ class GetFileResponse:
     def as_dict(self) -> dict:
         """Serializes the GetFileResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info.as_dict()
+        if self.file_info:
+            body["file_info"] = self.file_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info
+        if self.file_info:
+            body["file_info"] = self.file_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetFileResponse:
         """Deserializes the GetFileResponse from a dictionary."""
-        return cls(file_info=_from_dict(d, 'file_info', FileInfo))
+        return cls(file_info=_from_dict(d, "file_info", FileInfo))
 
 
 @dataclass
@@ -1096,19 +1281,21 @@ class GetLatestVersionProviderAnalyticsDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionProviderAnalyticsDashboardResponse:
         """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(version=d.get('version', None))
+        return cls(version=d.get("version", None))
 
 
 @dataclass
@@ -1120,23 +1307,28 @@ class GetListingContentMetadataResponse:
     def as_dict(self) -> dict:
         """Serializes the GetListingContentMetadataResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         if self.shared_data_objects:
-            body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects]
+            body["shared_data_objects"] = [v.as_dict() for v in self.shared_data_objects]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shared_data_objects:
+            body["shared_data_objects"] = self.shared_data_objects
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingContentMetadataResponse:
         """Deserializes the GetListingContentMetadataResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   shared_data_objects=_repeated_dict(d, 'shared_data_objects', SharedDataObject))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            shared_data_objects=_repeated_dict(d, "shared_data_objects", SharedDataObject),
+        )
 
 
 @dataclass
@@ -1146,19 +1338,21 @@ class GetListingResponse:
     def as_dict(self) -> dict:
         """Serializes the GetListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing: body['listing'] = self.listing.as_dict()
+        if self.listing:
+            body["listing"] = self.listing.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing: body['listing'] = self.listing
+        if self.listing:
+            body["listing"] = self.listing
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingResponse:
         """Deserializes the GetListingResponse from a dictionary."""
-        return cls(listing=_from_dict(d, 'listing', Listing))
+        return cls(listing=_from_dict(d, "listing", Listing))
 
 
 @dataclass
@@ -1170,22 +1364,28 @@ class GetListingsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingsResponse:
         """Deserializes the GetListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            listings=_repeated_dict(d, "listings", Listing),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1196,20 +1396,20 @@ def as_dict(self) -> dict:
         """Serializes the GetPersonalizationRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.personalization_requests:
-            body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
+            body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        if self.personalization_requests:
+            body["personalization_requests"] = self.personalization_requests
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPersonalizationRequestResponse:
         """Deserializes the GetPersonalizationRequestResponse from a dictionary."""
-        return cls(
-            personalization_requests=_repeated_dict(d, 'personalization_requests', PersonalizationRequest))
+        return cls(personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest))
 
 
 @dataclass
@@ -1219,19 +1419,21 @@ class GetProviderResponse:
     def as_dict(self) -> dict:
         """Serializes the GetProviderResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.provider: body['provider'] = self.provider.as_dict()
+        if self.provider:
+            body["provider"] = self.provider.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.provider: body['provider'] = self.provider
+        if self.provider:
+            body["provider"] = self.provider
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetProviderResponse:
         """Deserializes the GetProviderResponse from a dictionary."""
-        return cls(provider=_from_dict(d, 'provider', ProviderInfo))
+        return cls(provider=_from_dict(d, "provider", ProviderInfo))
 
 
 @dataclass
@@ -1241,19 +1443,21 @@ class Installation:
     def as_dict(self) -> dict:
         """Serializes the Installation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation: body['installation'] = self.installation.as_dict()
+        if self.installation:
+            body["installation"] = self.installation.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Installation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation: body['installation'] = self.installation
+        if self.installation:
+            body["installation"] = self.installation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Installation:
         """Deserializes the Installation from a dictionary."""
-        return cls(installation=_from_dict(d, 'installation', InstallationDetail))
+        return cls(installation=_from_dict(d, "installation", InstallationDetail))
 
 
 @dataclass
@@ -1287,61 +1491,89 @@ class InstallationDetail:
     def as_dict(self) -> dict:
         """Serializes the InstallationDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.id is not None: body['id'] = self.id
-        if self.installed_on is not None: body['installed_on'] = self.installed_on
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.status is not None: body['status'] = self.status.value
-        if self.token_detail: body['token_detail'] = self.token_detail.as_dict()
-        if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.id is not None:
+            body["id"] = self.id
+        if self.installed_on is not None:
+            body["installed_on"] = self.installed_on
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.token_detail:
+            body["token_detail"] = self.token_detail.as_dict()
+        if self.tokens:
+            body["tokens"] = [v.as_dict() for v in self.tokens]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.id is not None: body['id'] = self.id
-        if self.installed_on is not None: body['installed_on'] = self.installed_on
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.status is not None: body['status'] = self.status
-        if self.token_detail: body['token_detail'] = self.token_detail
-        if self.tokens: body['tokens'] = self.tokens
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.id is not None:
+            body["id"] = self.id
+        if self.installed_on is not None:
+            body["installed_on"] = self.installed_on
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.status is not None:
+            body["status"] = self.status
+        if self.token_detail:
+            body["token_detail"] = self.token_detail
+        if self.tokens:
+            body["tokens"] = self.tokens
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallationDetail:
         """Deserializes the InstallationDetail from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   error_message=d.get('error_message', None),
-                   id=d.get('id', None),
-                   installed_on=d.get('installed_on', None),
-                   listing_id=d.get('listing_id', None),
-                   listing_name=d.get('listing_name', None),
-                   recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType),
-                   repo_name=d.get('repo_name', None),
-                   repo_path=d.get('repo_path', None),
-                   share_name=d.get('share_name', None),
-                   status=_enum(d, 'status', InstallationStatus),
-                   token_detail=_from_dict(d, 'token_detail', TokenDetail),
-                   tokens=_repeated_dict(d, 'tokens', TokenInfo))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            error_message=d.get("error_message", None),
+            id=d.get("id", None),
+            installed_on=d.get("installed_on", None),
+            listing_id=d.get("listing_id", None),
+            listing_name=d.get("listing_name", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+            repo_name=d.get("repo_name", None),
+            repo_path=d.get("repo_path", None),
+            share_name=d.get("share_name", None),
+            status=_enum(d, "status", InstallationStatus),
+            token_detail=_from_dict(d, "token_detail", TokenDetail),
+            tokens=_repeated_dict(d, "tokens", TokenInfo),
+        )
 
 
 class InstallationStatus(Enum):
 
-    FAILED = 'FAILED'
-    INSTALLED = 'INSTALLED'
+    FAILED = "FAILED"
+    INSTALLED = "INSTALLED"
 
 
 @dataclass
@@ -1353,22 +1585,28 @@ class ListAllInstallationsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAllInstallationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installations: body['installations'] = [v.as_dict() for v in self.installations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = [v.as_dict() for v in self.installations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installations: body['installations'] = self.installations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = self.installations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllInstallationsResponse:
         """Deserializes the ListAllInstallationsResponse from a dictionary."""
-        return cls(installations=_repeated_dict(d, 'installations', InstallationDetail),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            installations=_repeated_dict(d, "installations", InstallationDetail),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1380,24 +1618,28 @@ class ListAllPersonalizationRequestsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAllPersonalizationRequestsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         if self.personalization_requests:
-            body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
+            body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.personalization_requests:
+            body["personalization_requests"] = self.personalization_requests
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllPersonalizationRequestsResponse:
         """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   personalization_requests=_repeated_dict(d, 'personalization_requests',
-                                                           PersonalizationRequest))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest),
+        )
 
 
 @dataclass
@@ -1409,22 +1651,28 @@ class ListExchangeFiltersResponse:
     def as_dict(self) -> dict:
         """Serializes the ListExchangeFiltersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filters: body['filters'] = [v.as_dict() for v in self.filters]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.filters:
+            body["filters"] = [v.as_dict() for v in self.filters]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filters: body['filters'] = self.filters
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.filters:
+            body["filters"] = self.filters
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangeFiltersResponse:
         """Deserializes the ListExchangeFiltersResponse from a dictionary."""
-        return cls(filters=_repeated_dict(d, 'filters', ExchangeFilter),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            filters=_repeated_dict(d, "filters", ExchangeFilter),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1436,22 +1684,28 @@ class ListExchangesForListingResponse:
     def as_dict(self) -> dict:
         """Serializes the ListExchangesForListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_listing: body['exchange_listing'] = [v.as_dict() for v in self.exchange_listing]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listing:
+            body["exchange_listing"] = [v.as_dict() for v in self.exchange_listing]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_listing: body['exchange_listing'] = self.exchange_listing
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listing:
+            body["exchange_listing"] = self.exchange_listing
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesForListingResponse:
         """Deserializes the ListExchangesForListingResponse from a dictionary."""
-        return cls(exchange_listing=_repeated_dict(d, 'exchange_listing', ExchangeListing),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            exchange_listing=_repeated_dict(d, "exchange_listing", ExchangeListing),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1463,22 +1717,28 @@ class ListExchangesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListExchangesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchanges: body['exchanges'] = [v.as_dict() for v in self.exchanges]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchanges:
+            body["exchanges"] = [v.as_dict() for v in self.exchanges]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchanges: body['exchanges'] = self.exchanges
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchanges:
+            body["exchanges"] = self.exchanges
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesResponse:
         """Deserializes the ListExchangesResponse from a dictionary."""
-        return cls(exchanges=_repeated_dict(d, 'exchanges', Exchange),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            exchanges=_repeated_dict(d, "exchanges", Exchange),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1490,22 +1750,28 @@ class ListFilesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListFilesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_infos: body['file_infos'] = [v.as_dict() for v in self.file_infos]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.file_infos:
+            body["file_infos"] = [v.as_dict() for v in self.file_infos]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_infos: body['file_infos'] = self.file_infos
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.file_infos:
+            body["file_infos"] = self.file_infos
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFilesResponse:
         """Deserializes the ListFilesResponse from a dictionary."""
-        return cls(file_infos=_repeated_dict(d, 'file_infos', FileInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            file_infos=_repeated_dict(d, "file_infos", FileInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1517,22 +1783,28 @@ class ListFulfillmentsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListFulfillmentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fulfillments: body['fulfillments'] = [v.as_dict() for v in self.fulfillments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.fulfillments:
+            body["fulfillments"] = [v.as_dict() for v in self.fulfillments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fulfillments: body['fulfillments'] = self.fulfillments
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.fulfillments:
+            body["fulfillments"] = self.fulfillments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFulfillmentsResponse:
         """Deserializes the ListFulfillmentsResponse from a dictionary."""
-        return cls(fulfillments=_repeated_dict(d, 'fulfillments', ListingFulfillment),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            fulfillments=_repeated_dict(d, "fulfillments", ListingFulfillment),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1544,22 +1816,28 @@ class ListInstallationsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListInstallationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installations: body['installations'] = [v.as_dict() for v in self.installations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = [v.as_dict() for v in self.installations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installations: body['installations'] = self.installations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = self.installations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstallationsResponse:
         """Deserializes the ListInstallationsResponse from a dictionary."""
-        return cls(installations=_repeated_dict(d, 'installations', InstallationDetail),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            installations=_repeated_dict(d, "installations", InstallationDetail),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1571,22 +1849,28 @@ class ListListingsForExchangeResponse:
     def as_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_listings: body['exchange_listings'] = [v.as_dict() for v in self.exchange_listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listings:
+            body["exchange_listings"] = [v.as_dict() for v in self.exchange_listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_listings: body['exchange_listings'] = self.exchange_listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listings:
+            body["exchange_listings"] = self.exchange_listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsForExchangeResponse:
         """Deserializes the ListListingsForExchangeResponse from a dictionary."""
-        return cls(exchange_listings=_repeated_dict(d, 'exchange_listings', ExchangeListing),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            exchange_listings=_repeated_dict(d, "exchange_listings", ExchangeListing),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1598,22 +1882,28 @@ class ListListingsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsResponse:
         """Deserializes the ListListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            listings=_repeated_dict(d, "listings", Listing),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1628,25 +1918,33 @@ class ListProviderAnalyticsDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderAnalyticsDashboardResponse:
         """Deserializes the ListProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   id=d.get('id', None),
-                   version=d.get('version', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            id=d.get("id", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -1658,22 +1956,28 @@ class ListProvidersResponse:
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = [v.as_dict() for v in self.providers]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = self.providers
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = self.providers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   providers=_repeated_dict(d, 'providers', ProviderInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            providers=_repeated_dict(d, "providers", ProviderInfo),
+        )
 
 
 @dataclass
@@ -1688,25 +1992,33 @@ class Listing:
     def as_dict(self) -> dict:
         """Serializes the Listing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.detail: body['detail'] = self.detail.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.summary: body['summary'] = self.summary.as_dict()
+        if self.detail:
+            body["detail"] = self.detail.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.summary:
+            body["summary"] = self.summary.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Listing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.detail: body['detail'] = self.detail
-        if self.id is not None: body['id'] = self.id
-        if self.summary: body['summary'] = self.summary
+        if self.detail:
+            body["detail"] = self.detail
+        if self.id is not None:
+            body["id"] = self.id
+        if self.summary:
+            body["summary"] = self.summary
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
-        return cls(detail=_from_dict(d, 'detail', ListingDetail),
-                   id=d.get('id', None),
-                   summary=_from_dict(d, 'summary', ListingSummary))
+        return cls(
+            detail=_from_dict(d, "detail", ListingDetail),
+            id=d.get("id", None),
+            summary=_from_dict(d, "summary", ListingSummary),
+        )
 
 
 @dataclass
@@ -1771,75 +2083,113 @@ class ListingDetail:
     def as_dict(self) -> dict:
         """Serializes the ListingDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets: body['assets'] = [v.value for v in self.assets]
-        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
-        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
-        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity.as_dict()
-        if self.cost is not None: body['cost'] = self.cost.value
-        if self.data_source is not None: body['data_source'] = self.data_source
-        if self.description is not None: body['description'] = self.description
-        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.assets:
+            body["assets"] = [v.value for v in self.assets]
+        if self.collection_date_end is not None:
+            body["collection_date_end"] = self.collection_date_end
+        if self.collection_date_start is not None:
+            body["collection_date_start"] = self.collection_date_start
+        if self.collection_granularity:
+            body["collection_granularity"] = self.collection_granularity.as_dict()
+        if self.cost is not None:
+            body["cost"] = self.cost.value
+        if self.data_source is not None:
+            body["data_source"] = self.data_source
+        if self.description is not None:
+            body["description"] = self.description
+        if self.documentation_link is not None:
+            body["documentation_link"] = self.documentation_link
         if self.embedded_notebook_file_infos:
-            body['embedded_notebook_file_infos'] = [v.as_dict() for v in self.embedded_notebook_file_infos]
-        if self.file_ids: body['file_ids'] = [v for v in self.file_ids]
-        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
-        if self.license is not None: body['license'] = self.license
-        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.size is not None: body['size'] = self.size
-        if self.support_link is not None: body['support_link'] = self.support_link
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
-        if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict()
+            body["embedded_notebook_file_infos"] = [v.as_dict() for v in self.embedded_notebook_file_infos]
+        if self.file_ids:
+            body["file_ids"] = [v for v in self.file_ids]
+        if self.geographical_coverage is not None:
+            body["geographical_coverage"] = self.geographical_coverage
+        if self.license is not None:
+            body["license"] = self.license
+        if self.pricing_model is not None:
+            body["pricing_model"] = self.pricing_model
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.size is not None:
+            body["size"] = self.size
+        if self.support_link is not None:
+            body["support_link"] = self.support_link
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.terms_of_service is not None:
+            body["terms_of_service"] = self.terms_of_service
+        if self.update_frequency:
+            body["update_frequency"] = self.update_frequency.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets: body['assets'] = self.assets
-        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
-        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
-        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity
-        if self.cost is not None: body['cost'] = self.cost
-        if self.data_source is not None: body['data_source'] = self.data_source
-        if self.description is not None: body['description'] = self.description
-        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.assets:
+            body["assets"] = self.assets
+        if self.collection_date_end is not None:
+            body["collection_date_end"] = self.collection_date_end
+        if self.collection_date_start is not None:
+            body["collection_date_start"] = self.collection_date_start
+        if self.collection_granularity:
+            body["collection_granularity"] = self.collection_granularity
+        if self.cost is not None:
+            body["cost"] = self.cost
+        if self.data_source is not None:
+            body["data_source"] = self.data_source
+        if self.description is not None:
+            body["description"] = self.description
+        if self.documentation_link is not None:
+            body["documentation_link"] = self.documentation_link
         if self.embedded_notebook_file_infos:
-            body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos
-        if self.file_ids: body['file_ids'] = self.file_ids
-        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
-        if self.license is not None: body['license'] = self.license
-        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.size is not None: body['size'] = self.size
-        if self.support_link is not None: body['support_link'] = self.support_link
-        if self.tags: body['tags'] = self.tags
-        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
-        if self.update_frequency: body['update_frequency'] = self.update_frequency
+            body["embedded_notebook_file_infos"] = self.embedded_notebook_file_infos
+        if self.file_ids:
+            body["file_ids"] = self.file_ids
+        if self.geographical_coverage is not None:
+            body["geographical_coverage"] = self.geographical_coverage
+        if self.license is not None:
+            body["license"] = self.license
+        if self.pricing_model is not None:
+            body["pricing_model"] = self.pricing_model
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.size is not None:
+            body["size"] = self.size
+        if self.support_link is not None:
+            body["support_link"] = self.support_link
+        if self.tags:
+            body["tags"] = self.tags
+        if self.terms_of_service is not None:
+            body["terms_of_service"] = self.terms_of_service
+        if self.update_frequency:
+            body["update_frequency"] = self.update_frequency
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingDetail:
         """Deserializes the ListingDetail from a dictionary."""
-        return cls(assets=_repeated_enum(d, 'assets', AssetType),
-                   collection_date_end=d.get('collection_date_end', None),
-                   collection_date_start=d.get('collection_date_start', None),
-                   collection_granularity=_from_dict(d, 'collection_granularity', DataRefreshInfo),
-                   cost=_enum(d, 'cost', Cost),
-                   data_source=d.get('data_source', None),
-                   description=d.get('description', None),
-                   documentation_link=d.get('documentation_link', None),
-                   embedded_notebook_file_infos=_repeated_dict(d, 'embedded_notebook_file_infos', FileInfo),
-                   file_ids=d.get('file_ids', None),
-                   geographical_coverage=d.get('geographical_coverage', None),
-                   license=d.get('license', None),
-                   pricing_model=d.get('pricing_model', None),
-                   privacy_policy_link=d.get('privacy_policy_link', None),
-                   size=d.get('size', None),
-                   support_link=d.get('support_link', None),
-                   tags=_repeated_dict(d, 'tags', ListingTag),
-                   terms_of_service=d.get('terms_of_service', None),
-                   update_frequency=_from_dict(d, 'update_frequency', DataRefreshInfo))
+        return cls(
+            assets=_repeated_enum(d, "assets", AssetType),
+            collection_date_end=d.get("collection_date_end", None),
+            collection_date_start=d.get("collection_date_start", None),
+            collection_granularity=_from_dict(d, "collection_granularity", DataRefreshInfo),
+            cost=_enum(d, "cost", Cost),
+            data_source=d.get("data_source", None),
+            description=d.get("description", None),
+            documentation_link=d.get("documentation_link", None),
+            embedded_notebook_file_infos=_repeated_dict(d, "embedded_notebook_file_infos", FileInfo),
+            file_ids=d.get("file_ids", None),
+            geographical_coverage=d.get("geographical_coverage", None),
+            license=d.get("license", None),
+            pricing_model=d.get("pricing_model", None),
+            privacy_policy_link=d.get("privacy_policy_link", None),
+            size=d.get("size", None),
+            support_link=d.get("support_link", None),
+            tags=_repeated_dict(d, "tags", ListingTag),
+            terms_of_service=d.get("terms_of_service", None),
+            update_frequency=_from_dict(d, "update_frequency", DataRefreshInfo),
+        )
 
 
 @dataclass
@@ -1857,31 +2207,43 @@ class ListingFulfillment:
     def as_dict(self) -> dict:
         """Serializes the ListingFulfillment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type.value
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.repo_info: body['repo_info'] = self.repo_info.as_dict()
-        if self.share_info: body['share_info'] = self.share_info.as_dict()
+        if self.fulfillment_type is not None:
+            body["fulfillment_type"] = self.fulfillment_type.value
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.repo_info:
+            body["repo_info"] = self.repo_info.as_dict()
+        if self.share_info:
+            body["share_info"] = self.share_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.repo_info: body['repo_info'] = self.repo_info
-        if self.share_info: body['share_info'] = self.share_info
+        if self.fulfillment_type is not None:
+            body["fulfillment_type"] = self.fulfillment_type
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
+        if self.repo_info:
+            body["repo_info"] = self.repo_info
+        if self.share_info:
+            body["share_info"] = self.share_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment:
         """Deserializes the ListingFulfillment from a dictionary."""
-        return cls(fulfillment_type=_enum(d, 'fulfillment_type', FulfillmentType),
-                   listing_id=d.get('listing_id', None),
-                   recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType),
-                   repo_info=_from_dict(d, 'repo_info', RepoInfo),
-                   share_info=_from_dict(d, 'share_info', ShareInfo))
+        return cls(
+            fulfillment_type=_enum(d, "fulfillment_type", FulfillmentType),
+            listing_id=d.get("listing_id", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+            repo_info=_from_dict(d, "repo_info", RepoInfo),
+            share_info=_from_dict(d, "share_info", ShareInfo),
+        )
 
 
 @dataclass
@@ -1891,34 +2253,36 @@ class ListingSetting:
     def as_dict(self) -> dict:
         """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.visibility is not None: body['visibility'] = self.visibility.value
+        if self.visibility is not None:
+            body["visibility"] = self.visibility.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.visibility is not None: body['visibility'] = self.visibility
+        if self.visibility is not None:
+            body["visibility"] = self.visibility
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSetting:
         """Deserializes the ListingSetting from a dictionary."""
-        return cls(visibility=_enum(d, 'visibility', Visibility))
+        return cls(visibility=_enum(d, "visibility", Visibility))
 
 
 class ListingShareType(Enum):
 
-    FULL = 'FULL'
-    SAMPLE = 'SAMPLE'
+    FULL = "FULL"
+    SAMPLE = "SAMPLE"
 
 
 class ListingStatus(Enum):
     """Enums"""
 
-    DRAFT = 'DRAFT'
-    PENDING = 'PENDING'
-    PUBLISHED = 'PUBLISHED'
-    SUSPENDED = 'SUSPENDED'
+    DRAFT = "DRAFT"
+    PENDING = "PENDING"
+    PUBLISHED = "PUBLISHED"
+    SUSPENDED = "SUSPENDED"
 
 
 @dataclass
@@ -1969,73 +2333,113 @@ class ListingSummary:
     def as_dict(self) -> dict:
         """Serializes the ListingSummary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.categories: body['categories'] = [v.value for v in self.categories]
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids]
-        if self.git_repo: body['git_repo'] = self.git_repo.as_dict()
-        if self.listing_type is not None: body['listingType'] = self.listing_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.provider_region: body['provider_region'] = self.provider_region.as_dict()
-        if self.published_at is not None: body['published_at'] = self.published_at
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.setting: body['setting'] = self.setting.as_dict()
-        if self.share: body['share'] = self.share.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.subtitle is not None: body['subtitle'] = self.subtitle
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        if self.categories:
+            body["categories"] = [v.value for v in self.categories]
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.exchange_ids:
+            body["exchange_ids"] = [v for v in self.exchange_ids]
+        if self.git_repo:
+            body["git_repo"] = self.git_repo.as_dict()
+        if self.listing_type is not None:
+            body["listingType"] = self.listing_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.provider_region:
+            body["provider_region"] = self.provider_region.as_dict()
+        if self.published_at is not None:
+            body["published_at"] = self.published_at
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
+        if self.share:
+            body["share"] = self.share.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.subtitle is not None:
+            body["subtitle"] = self.subtitle
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.updated_by_id is not None:
+            body["updated_by_id"] = self.updated_by_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.categories: body['categories'] = self.categories
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.exchange_ids: body['exchange_ids'] = self.exchange_ids
-        if self.git_repo: body['git_repo'] = self.git_repo
-        if self.listing_type is not None: body['listingType'] = self.listing_type
-        if self.name is not None: body['name'] = self.name
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.provider_region: body['provider_region'] = self.provider_region
-        if self.published_at is not None: body['published_at'] = self.published_at
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.setting: body['setting'] = self.setting
-        if self.share: body['share'] = self.share
-        if self.status is not None: body['status'] = self.status
-        if self.subtitle is not None: body['subtitle'] = self.subtitle
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        if self.categories:
+            body["categories"] = self.categories
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.exchange_ids:
+            body["exchange_ids"] = self.exchange_ids
+        if self.git_repo:
+            body["git_repo"] = self.git_repo
+        if self.listing_type is not None:
+            body["listingType"] = self.listing_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.provider_region:
+            body["provider_region"] = self.provider_region
+        if self.published_at is not None:
+            body["published_at"] = self.published_at
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.setting:
+            body["setting"] = self.setting
+        if self.share:
+            body["share"] = self.share
+        if self.status is not None:
+            body["status"] = self.status
+        if self.subtitle is not None:
+            body["subtitle"] = self.subtitle
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.updated_by_id is not None:
+            body["updated_by_id"] = self.updated_by_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSummary:
         """Deserializes the ListingSummary from a dictionary."""
-        return cls(categories=_repeated_enum(d, 'categories', Category),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   created_by_id=d.get('created_by_id', None),
-                   exchange_ids=d.get('exchange_ids', None),
-                   git_repo=_from_dict(d, 'git_repo', RepoInfo),
-                   listing_type=_enum(d, 'listingType', ListingType),
-                   name=d.get('name', None),
-                   provider_id=d.get('provider_id', None),
-                   provider_region=_from_dict(d, 'provider_region', RegionInfo),
-                   published_at=d.get('published_at', None),
-                   published_by=d.get('published_by', None),
-                   setting=_from_dict(d, 'setting', ListingSetting),
-                   share=_from_dict(d, 'share', ShareInfo),
-                   status=_enum(d, 'status', ListingStatus),
-                   subtitle=d.get('subtitle', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None),
-                   updated_by_id=d.get('updated_by_id', None))
+        return cls(
+            categories=_repeated_enum(d, "categories", Category),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            created_by_id=d.get("created_by_id", None),
+            exchange_ids=d.get("exchange_ids", None),
+            git_repo=_from_dict(d, "git_repo", RepoInfo),
+            listing_type=_enum(d, "listingType", ListingType),
+            name=d.get("name", None),
+            provider_id=d.get("provider_id", None),
+            provider_region=_from_dict(d, "provider_region", RegionInfo),
+            published_at=d.get("published_at", None),
+            published_by=d.get("published_by", None),
+            setting=_from_dict(d, "setting", ListingSetting),
+            share=_from_dict(d, "share", ShareInfo),
+            status=_enum(d, "status", ListingStatus),
+            subtitle=d.get("subtitle", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            updated_by_id=d.get("updated_by_id", None),
+        )
 
 
 @dataclass
@@ -2049,39 +2453,46 @@ class ListingTag:
     def as_dict(self) -> dict:
         """Serializes the ListingTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.tag_name is not None: body['tag_name'] = self.tag_name.value
-        if self.tag_values: body['tag_values'] = [v for v in self.tag_values]
+        if self.tag_name is not None:
+            body["tag_name"] = self.tag_name.value
+        if self.tag_values:
+            body["tag_values"] = [v for v in self.tag_values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.tag_name is not None: body['tag_name'] = self.tag_name
-        if self.tag_values: body['tag_values'] = self.tag_values
+        if self.tag_name is not None:
+            body["tag_name"] = self.tag_name
+        if self.tag_values:
+            body["tag_values"] = self.tag_values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingTag:
         """Deserializes the ListingTag from a dictionary."""
-        return cls(tag_name=_enum(d, 'tag_name', ListingTagType), tag_values=d.get('tag_values', None))
+        return cls(
+            tag_name=_enum(d, "tag_name", ListingTagType),
+            tag_values=d.get("tag_values", None),
+        )
 
 
 class ListingTagType(Enum):
 
-    LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE'
-    LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK'
+    LISTING_TAG_TYPE_LANGUAGE = "LISTING_TAG_TYPE_LANGUAGE"
+    LISTING_TAG_TYPE_TASK = "LISTING_TAG_TYPE_TASK"
 
 
 class ListingType(Enum):
 
-    PERSONALIZED = 'PERSONALIZED'
-    STANDARD = 'STANDARD'
+    PERSONALIZED = "PERSONALIZED"
+    STANDARD = "STANDARD"
 
 
 class MarketplaceFileType(Enum):
 
-    EMBEDDED_NOTEBOOK = 'EMBEDDED_NOTEBOOK'
-    PROVIDER_ICON = 'PROVIDER_ICON'
+    EMBEDDED_NOTEBOOK = "EMBEDDED_NOTEBOOK"
+    PROVIDER_ICON = "PROVIDER_ICON"
 
 
 @dataclass
@@ -2122,72 +2533,106 @@ class PersonalizationRequest:
     def as_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.consumer_region: body['consumer_region'] = self.consumer_region.as_dict()
-        if self.contact_info: body['contact_info'] = self.contact_info.as_dict()
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.share: body['share'] = self.share.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.consumer_region:
+            body["consumer_region"] = self.consumer_region.as_dict()
+        if self.contact_info:
+            body["contact_info"] = self.contact_info.as_dict()
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.share:
+            body["share"] = self.share.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.consumer_region: body['consumer_region'] = self.consumer_region
-        if self.contact_info: body['contact_info'] = self.contact_info
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.share: body['share'] = self.share
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.consumer_region:
+            body["consumer_region"] = self.consumer_region
+        if self.contact_info:
+            body["contact_info"] = self.contact_info
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
+        if self.share:
+            body["share"] = self.share
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalizationRequest:
         """Deserializes the PersonalizationRequest from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   consumer_region=_from_dict(d, 'consumer_region', RegionInfo),
-                   contact_info=_from_dict(d, 'contact_info', ContactInfo),
-                   created_at=d.get('created_at', None),
-                   id=d.get('id', None),
-                   intended_use=d.get('intended_use', None),
-                   is_from_lighthouse=d.get('is_from_lighthouse', None),
-                   listing_id=d.get('listing_id', None),
-                   listing_name=d.get('listing_name', None),
-                   metastore_id=d.get('metastore_id', None),
-                   provider_id=d.get('provider_id', None),
-                   recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType),
-                   share=_from_dict(d, 'share', ShareInfo),
-                   status=_enum(d, 'status', PersonalizationRequestStatus),
-                   status_message=d.get('status_message', None),
-                   updated_at=d.get('updated_at', None))
+        return cls(
+            comment=d.get("comment", None),
+            consumer_region=_from_dict(d, "consumer_region", RegionInfo),
+            contact_info=_from_dict(d, "contact_info", ContactInfo),
+            created_at=d.get("created_at", None),
+            id=d.get("id", None),
+            intended_use=d.get("intended_use", None),
+            is_from_lighthouse=d.get("is_from_lighthouse", None),
+            listing_id=d.get("listing_id", None),
+            listing_name=d.get("listing_name", None),
+            metastore_id=d.get("metastore_id", None),
+            provider_id=d.get("provider_id", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+            share=_from_dict(d, "share", ShareInfo),
+            status=_enum(d, "status", PersonalizationRequestStatus),
+            status_message=d.get("status_message", None),
+            updated_at=d.get("updated_at", None),
+        )
 
 
 class PersonalizationRequestStatus(Enum):
 
-    DENIED = 'DENIED'
-    FULFILLED = 'FULFILLED'
-    NEW = 'NEW'
-    REQUEST_PENDING = 'REQUEST_PENDING'
+    DENIED = "DENIED"
+    FULFILLED = "FULFILLED"
+    NEW = "NEW"
+    REQUEST_PENDING = "REQUEST_PENDING"
 
 
 @dataclass
@@ -2197,19 +2642,21 @@ class ProviderAnalyticsDashboard:
     def as_dict(self) -> dict:
         """Serializes the ProviderAnalyticsDashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard:
         """Deserializes the ProviderAnalyticsDashboard from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 @dataclass
@@ -2248,63 +2695,87 @@ def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.business_contact_email is not None:
-            body['business_contact_email'] = self.business_contact_email
-        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
+            body["business_contact_email"] = self.business_contact_email
+        if self.company_website_link is not None:
+            body["company_website_link"] = self.company_website_link
         if self.dark_mode_icon_file_id is not None:
-            body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
+            body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id
         if self.dark_mode_icon_file_path is not None:
-            body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
-        if self.description is not None: body['description'] = self.description
-        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
-        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
-        if self.id is not None: body['id'] = self.id
-        if self.is_featured is not None: body['is_featured'] = self.is_featured
-        if self.name is not None: body['name'] = self.name
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
-        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+            body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.icon_file_id is not None:
+            body["icon_file_id"] = self.icon_file_id
+        if self.icon_file_path is not None:
+            body["icon_file_path"] = self.icon_file_path
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_featured is not None:
+            body["is_featured"] = self.is_featured
+        if self.name is not None:
+            body["name"] = self.name
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.support_contact_email is not None:
+            body["support_contact_email"] = self.support_contact_email
+        if self.term_of_service_link is not None:
+            body["term_of_service_link"] = self.term_of_service_link
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.business_contact_email is not None:
-            body['business_contact_email'] = self.business_contact_email
-        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
+            body["business_contact_email"] = self.business_contact_email
+        if self.company_website_link is not None:
+            body["company_website_link"] = self.company_website_link
         if self.dark_mode_icon_file_id is not None:
-            body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
+            body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id
         if self.dark_mode_icon_file_path is not None:
-            body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
-        if self.description is not None: body['description'] = self.description
-        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
-        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
-        if self.id is not None: body['id'] = self.id
-        if self.is_featured is not None: body['is_featured'] = self.is_featured
-        if self.name is not None: body['name'] = self.name
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
-        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+            body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.icon_file_id is not None:
+            body["icon_file_id"] = self.icon_file_id
+        if self.icon_file_path is not None:
+            body["icon_file_path"] = self.icon_file_path
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_featured is not None:
+            body["is_featured"] = self.is_featured
+        if self.name is not None:
+            body["name"] = self.name
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.support_contact_email is not None:
+            body["support_contact_email"] = self.support_contact_email
+        if self.term_of_service_link is not None:
+            body["term_of_service_link"] = self.term_of_service_link
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
-        return cls(business_contact_email=d.get('business_contact_email', None),
-                   company_website_link=d.get('company_website_link', None),
-                   dark_mode_icon_file_id=d.get('dark_mode_icon_file_id', None),
-                   dark_mode_icon_file_path=d.get('dark_mode_icon_file_path', None),
-                   description=d.get('description', None),
-                   icon_file_id=d.get('icon_file_id', None),
-                   icon_file_path=d.get('icon_file_path', None),
-                   id=d.get('id', None),
-                   is_featured=d.get('is_featured', None),
-                   name=d.get('name', None),
-                   privacy_policy_link=d.get('privacy_policy_link', None),
-                   published_by=d.get('published_by', None),
-                   support_contact_email=d.get('support_contact_email', None),
-                   term_of_service_link=d.get('term_of_service_link', None))
+        return cls(
+            business_contact_email=d.get("business_contact_email", None),
+            company_website_link=d.get("company_website_link", None),
+            dark_mode_icon_file_id=d.get("dark_mode_icon_file_id", None),
+            dark_mode_icon_file_path=d.get("dark_mode_icon_file_path", None),
+            description=d.get("description", None),
+            icon_file_id=d.get("icon_file_id", None),
+            icon_file_path=d.get("icon_file_path", None),
+            id=d.get("id", None),
+            is_featured=d.get("is_featured", None),
+            name=d.get("name", None),
+            privacy_policy_link=d.get("privacy_policy_link", None),
+            published_by=d.get("published_by", None),
+            support_contact_email=d.get("support_contact_email", None),
+            term_of_service_link=d.get("term_of_service_link", None),
+        )
 
 
 @dataclass
@@ -2316,21 +2787,25 @@ class RegionInfo:
     def as_dict(self) -> dict:
         """Serializes the RegionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.region is not None: body['region'] = self.region
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.region is not None: body['region'] = self.region
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegionInfo:
         """Deserializes the RegionInfo from a dictionary."""
-        return cls(cloud=d.get('cloud', None), region=d.get('region', None))
+        return cls(cloud=d.get("cloud", None), region=d.get("region", None))
 
 
 @dataclass
@@ -2360,19 +2835,21 @@ class RepoInfo:
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
+        if self.git_repo_url is not None:
+            body["git_repo_url"] = self.git_repo_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
+        if self.git_repo_url is not None:
+            body["git_repo_url"] = self.git_repo_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
-        return cls(git_repo_url=d.get('git_repo_url', None))
+        return cls(git_repo_url=d.get("git_repo_url", None))
 
 
 @dataclass
@@ -2387,21 +2864,28 @@ class RepoInstallation:
     def as_dict(self) -> dict:
         """Serializes the RepoInstallation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInstallation:
         """Deserializes the RepoInstallation from a dictionary."""
-        return cls(repo_name=d.get('repo_name', None), repo_path=d.get('repo_path', None))
+        return cls(
+            repo_name=d.get("repo_name", None),
+            repo_path=d.get("repo_path", None),
+        )
 
 
 @dataclass
@@ -2413,22 +2897,28 @@ class SearchListingsResponse:
     def as_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchListingsResponse:
         """Deserializes the SearchListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            listings=_repeated_dict(d, "listings", Listing),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -2440,21 +2930,25 @@ class ShareInfo:
     def as_dict(self) -> dict:
         """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
-        return cls(name=d.get('name', None), type=_enum(d, 'type', ListingShareType))
+        return cls(name=d.get("name", None), type=_enum(d, "type", ListingShareType))
 
 
 @dataclass
@@ -2468,21 +2962,28 @@ class SharedDataObject:
     def as_dict(self) -> dict:
         """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
-        if self.name is not None: body['name'] = self.name
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
-        if self.name is not None: body['name'] = self.name
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
-        return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None))
+        return cls(
+            data_object_type=d.get("data_object_type", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -2500,30 +3001,38 @@ class TokenDetail:
     def as_dict(self) -> dict:
         """Serializes the TokenDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
         if self.share_credentials_version is not None:
-            body['shareCredentialsVersion'] = self.share_credentials_version
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
         if self.share_credentials_version is not None:
-            body['shareCredentialsVersion'] = self.share_credentials_version
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenDetail:
         """Deserializes the TokenDetail from a dictionary."""
-        return cls(bearer_token=d.get('bearerToken', None),
-                   endpoint=d.get('endpoint', None),
-                   expiration_time=d.get('expirationTime', None),
-                   share_credentials_version=d.get('shareCredentialsVersion', None))
+        return cls(
+            bearer_token=d.get("bearerToken", None),
+            endpoint=d.get("endpoint", None),
+            expiration_time=d.get("expirationTime", None),
+            share_credentials_version=d.get("shareCredentialsVersion", None),
+        )
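
# Illustrative sketch, not part of the patch: the generated classes above expose a
# symmetric as_dict()/from_dict() pair, using the wire names shown in as_dict().
# Values are made up; assumes the usual Optional[...] = None field defaults.
from databricks.sdk.service.marketplace import TokenDetail

detail = TokenDetail(
    bearer_token="abc123",
    endpoint="https://sharing.example.com",
    share_credentials_version=1,
)
wire = detail.as_dict()
# {"bearerToken": "abc123", "endpoint": "https://sharing.example.com", "shareCredentialsVersion": 1}
assert TokenDetail.from_dict(wire) == detail  # dataclass equality; expiration_time stays None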
 
 
 @dataclass
@@ -2553,37 +3062,53 @@ class TokenInfo:
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
-        return cls(activation_url=d.get('activation_url', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   expiration_time=d.get('expiration_time', None),
-                   id=d.get('id', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            activation_url=d.get("activation_url", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            expiration_time=d.get("expiration_time", None),
+            id=d.get("id", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -2595,21 +3120,28 @@ class UpdateExchangeFilterRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter: body['filter'] = self.filter.as_dict()
-        if self.id is not None: body['id'] = self.id
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter: body['filter'] = self.filter
-        if self.id is not None: body['id'] = self.id
+        if self.filter:
+            body["filter"] = self.filter
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterRequest:
         """Deserializes the UpdateExchangeFilterRequest from a dictionary."""
-        return cls(filter=_from_dict(d, 'filter', ExchangeFilter), id=d.get('id', None))
+        return cls(
+            filter=_from_dict(d, "filter", ExchangeFilter),
+            id=d.get("id", None),
+        )
 
 
 @dataclass
@@ -2619,19 +3151,21 @@ class UpdateExchangeFilterResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter: body['filter'] = self.filter.as_dict()
+        if self.filter:
+            body["filter"] = self.filter.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter: body['filter'] = self.filter
+        if self.filter:
+            body["filter"] = self.filter
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterResponse:
         """Deserializes the UpdateExchangeFilterResponse from a dictionary."""
-        return cls(filter=_from_dict(d, 'filter', ExchangeFilter))
+        return cls(filter=_from_dict(d, "filter", ExchangeFilter))
 
 
 @dataclass
@@ -2643,21 +3177,25 @@ class UpdateExchangeRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange.as_dict()
-        if self.id is not None: body['id'] = self.id
+        if self.exchange:
+            body["exchange"] = self.exchange.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange
-        if self.id is not None: body['id'] = self.id
+        if self.exchange:
+            body["exchange"] = self.exchange
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeRequest:
         """Deserializes the UpdateExchangeRequest from a dictionary."""
-        return cls(exchange=_from_dict(d, 'exchange', Exchange), id=d.get('id', None))
+        return cls(exchange=_from_dict(d, "exchange", Exchange), id=d.get("id", None))
 
 
 @dataclass
@@ -2667,19 +3205,21 @@ class UpdateExchangeResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange.as_dict()
+        if self.exchange:
+            body["exchange"] = self.exchange.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange
+        if self.exchange:
+            body["exchange"] = self.exchange
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeResponse:
         """Deserializes the UpdateExchangeResponse from a dictionary."""
-        return cls(exchange=_from_dict(d, 'exchange', Exchange))
+        return cls(exchange=_from_dict(d, "exchange", Exchange))
 
 
 @dataclass
@@ -2695,28 +3235,38 @@ class UpdateInstallationRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateInstallationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation: body['installation'] = self.installation.as_dict()
-        if self.installation_id is not None: body['installation_id'] = self.installation_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
+        if self.installation:
+            body["installation"] = self.installation.as_dict()
+        if self.installation_id is not None:
+            body["installation_id"] = self.installation_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.rotate_token is not None:
+            body["rotate_token"] = self.rotate_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation: body['installation'] = self.installation
-        if self.installation_id is not None: body['installation_id'] = self.installation_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
+        if self.installation:
+            body["installation"] = self.installation
+        if self.installation_id is not None:
+            body["installation_id"] = self.installation_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.rotate_token is not None:
+            body["rotate_token"] = self.rotate_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationRequest:
         """Deserializes the UpdateInstallationRequest from a dictionary."""
-        return cls(installation=_from_dict(d, 'installation', InstallationDetail),
-                   installation_id=d.get('installation_id', None),
-                   listing_id=d.get('listing_id', None),
-                   rotate_token=d.get('rotate_token', None))
+        return cls(
+            installation=_from_dict(d, "installation", InstallationDetail),
+            installation_id=d.get("installation_id", None),
+            listing_id=d.get("listing_id", None),
+            rotate_token=d.get("rotate_token", None),
+        )
 
 
 @dataclass
@@ -2726,19 +3276,21 @@ class UpdateInstallationResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateInstallationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation: body['installation'] = self.installation.as_dict()
+        if self.installation:
+            body["installation"] = self.installation.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation: body['installation'] = self.installation
+        if self.installation:
+            body["installation"] = self.installation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationResponse:
         """Deserializes the UpdateInstallationResponse from a dictionary."""
-        return cls(installation=_from_dict(d, 'installation', InstallationDetail))
+        return cls(installation=_from_dict(d, "installation", InstallationDetail))
 
 
 @dataclass
@@ -2750,21 +3302,25 @@ class UpdateListingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateListingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.listing: body['listing'] = self.listing.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing:
+            body["listing"] = self.listing.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.listing: body['listing'] = self.listing
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing:
+            body["listing"] = self.listing
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingRequest:
         """Deserializes the UpdateListingRequest from a dictionary."""
-        return cls(id=d.get('id', None), listing=_from_dict(d, 'listing', Listing))
+        return cls(id=d.get("id", None), listing=_from_dict(d, "listing", Listing))
 
 
 @dataclass
@@ -2774,19 +3330,21 @@ class UpdateListingResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing: body['listing'] = self.listing.as_dict()
+        if self.listing:
+            body["listing"] = self.listing.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing: body['listing'] = self.listing
+        if self.listing:
+            body["listing"] = self.listing
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingResponse:
         """Deserializes the UpdateListingResponse from a dictionary."""
-        return cls(listing=_from_dict(d, 'listing', Listing))
+        return cls(listing=_from_dict(d, "listing", Listing))
 
 
 @dataclass
@@ -2804,31 +3362,43 @@ class UpdatePersonalizationRequestRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.reason is not None: body['reason'] = self.reason
-        if self.request_id is not None: body['request_id'] = self.request_id
-        if self.share: body['share'] = self.share.as_dict()
-        if self.status is not None: body['status'] = self.status.value
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.reason is not None:
+            body["reason"] = self.reason
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
+        if self.share:
+            body["share"] = self.share.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.reason is not None: body['reason'] = self.reason
-        if self.request_id is not None: body['request_id'] = self.request_id
-        if self.share: body['share'] = self.share
-        if self.status is not None: body['status'] = self.status
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.reason is not None:
+            body["reason"] = self.reason
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
+        if self.share:
+            body["share"] = self.share
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestRequest:
         """Deserializes the UpdatePersonalizationRequestRequest from a dictionary."""
-        return cls(listing_id=d.get('listing_id', None),
-                   reason=d.get('reason', None),
-                   request_id=d.get('request_id', None),
-                   share=_from_dict(d, 'share', ShareInfo),
-                   status=_enum(d, 'status', PersonalizationRequestStatus))
+        return cls(
+            listing_id=d.get("listing_id", None),
+            reason=d.get("reason", None),
+            request_id=d.get("request_id", None),
+            share=_from_dict(d, "share", ShareInfo),
+            status=_enum(d, "status", PersonalizationRequestStatus),
+        )
 
 
 @dataclass
@@ -2838,19 +3408,21 @@ class UpdatePersonalizationRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.request: body['request'] = self.request.as_dict()
+        if self.request:
+            body["request"] = self.request.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.request: body['request'] = self.request
+        if self.request:
+            body["request"] = self.request
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestResponse:
         """Deserializes the UpdatePersonalizationRequestResponse from a dictionary."""
-        return cls(request=_from_dict(d, 'request', PersonalizationRequest))
+        return cls(request=_from_dict(d, "request", PersonalizationRequest))
 
 
 @dataclass
@@ -2865,21 +3437,25 @@ class UpdateProviderAnalyticsDashboardRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateProviderAnalyticsDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardRequest:
         """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary."""
-        return cls(id=d.get('id', None), version=d.get('version', None))
+        return cls(id=d.get("id", None), version=d.get("version", None))
 
 
 @dataclass
@@ -2895,25 +3471,33 @@ class UpdateProviderAnalyticsDashboardResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardResponse:
         """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   id=d.get('id', None),
-                   version=d.get('version', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            id=d.get("id", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -2925,21 +3509,28 @@ class UpdateProviderRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateProviderRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.provider: body['provider'] = self.provider.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.provider:
+            body["provider"] = self.provider.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.provider: body['provider'] = self.provider
+        if self.id is not None:
+            body["id"] = self.id
+        if self.provider:
+            body["provider"] = self.provider
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderRequest:
         """Deserializes the UpdateProviderRequest from a dictionary."""
-        return cls(id=d.get('id', None), provider=_from_dict(d, 'provider', ProviderInfo))
+        return cls(
+            id=d.get("id", None),
+            provider=_from_dict(d, "provider", ProviderInfo),
+        )
 
 
 @dataclass
@@ -2949,25 +3540,27 @@ class UpdateProviderResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateProviderResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.provider: body['provider'] = self.provider.as_dict()
+        if self.provider:
+            body["provider"] = self.provider.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.provider: body['provider'] = self.provider
+        if self.provider:
+            body["provider"] = self.provider
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderResponse:
         """Deserializes the UpdateProviderResponse from a dictionary."""
-        return cls(provider=_from_dict(d, 'provider', ProviderInfo))
+        return cls(provider=_from_dict(d, "provider", ProviderInfo))
 
 
 class Visibility(Enum):
 
-    PRIVATE = 'PRIVATE'
-    PUBLIC = 'PUBLIC'
+    PRIVATE = "PRIVATE"
+    PUBLIC = "PUBLIC"
 
 
 class ConsumerFulfillmentsAPI:
@@ -2976,74 +3569,90 @@ class ConsumerFulfillmentsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def get(self,
-            listing_id: str,
-            *,
-            page_size: Optional[int] = None,
-            page_token: Optional[str] = None) -> Iterator[SharedDataObject]:
+    def get(
+        self,
+        listing_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[SharedDataObject]:
         """Get listing content metadata.
-        
+
         Get a high-level preview of the metadata of a listing's installable content.
-        
+
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`SharedDataObject`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/marketplace-consumer/listings/{listing_id}/content',
-                                query=query,
-                                headers=headers)
-            if 'shared_data_objects' in json:
-                for v in json['shared_data_objects']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/marketplace-consumer/listings/{listing_id}/content",
+                query=query,
+                headers=headers,
+            )
+            if "shared_data_objects" in json:
+                for v in json["shared_data_objects"]:
                     yield SharedDataObject.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list(self,
-             listing_id: str,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ListingFulfillment]:
+            query["page_token"] = json["next_page_token"]
+
+    def list(
+        self,
+        listing_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ListingFulfillment]:
         """List all listing fulfillments.
-        
+
         Get all listing fulfillments associated with a listing. A _fulfillment_ is a potential installation.
         Standard installations contain metadata about the attached share or git repo. Only one of these fields
         will be present. Personalized installations contain metadata about the attached share or git repo, as
         well as the Delta Sharing recipient type.
-        
+
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ListingFulfillment`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments',
-                                query=query,
-                                headers=headers)
-            if 'fulfillments' in json:
-                for v in json['fulfillments']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments",
+                query=query,
+                headers=headers,
+            )
+            if "fulfillments" in json:
+                for v in json["fulfillments"]:
                     yield ListingFulfillment.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
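
# Usage sketch, not part of the patch: the pagination loop above backs an Iterator,
# so callers simply iterate. Assumes a configured WorkspaceClient exposing this API
# as `w.consumer_fulfillments` (attribute name assumed) and a placeholder listing id.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for obj in w.consumer_fulfillments.get(listing_id="<listing-id>", page_size=10):
    # Each page's shared_data_objects are yielded; next_page_token is followed internally.
    print(obj.name, obj.data_object_type)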
 
 
 class ConsumerInstallationsAPI:
@@ -3052,18 +3661,20 @@ class ConsumerInstallationsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               listing_id: str,
-               *,
-               accepted_consumer_terms: Optional[ConsumerTerms] = None,
-               catalog_name: Optional[str] = None,
-               recipient_type: Optional[DeltaSharingRecipientType] = None,
-               repo_detail: Optional[RepoInstallation] = None,
-               share_name: Optional[str] = None) -> Installation:
+    def create(
+        self,
+        listing_id: str,
+        *,
+        accepted_consumer_terms: Optional[ConsumerTerms] = None,
+        catalog_name: Optional[str] = None,
+        recipient_type: Optional[DeltaSharingRecipientType] = None,
+        repo_detail: Optional[RepoInstallation] = None,
+        share_name: Optional[str] = None,
+    ) -> Installation:
         """Install from a listing.
-        
+
         Install payload associated with a Databricks Marketplace listing.
-        
+
         :param listing_id: str
         :param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
         :param catalog_name: str (optional)
@@ -3071,135 +3682,172 @@ def create(self,
         :param repo_detail: :class:`RepoInstallation` (optional)
           for git repo installations
         :param share_name: str (optional)
-        
+
         :returns: :class:`Installation`
         """
         body = {}
         if accepted_consumer_terms is not None:
-            body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict()
-        if catalog_name is not None: body['catalog_name'] = catalog_name
-        if recipient_type is not None: body['recipient_type'] = recipient_type.value
-        if repo_detail is not None: body['repo_detail'] = repo_detail.as_dict()
-        if share_name is not None: body['share_name'] = share_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations',
-                           body=body,
-                           headers=headers)
+            body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict()
+        if catalog_name is not None:
+            body["catalog_name"] = catalog_name
+        if recipient_type is not None:
+            body["recipient_type"] = recipient_type.value
+        if repo_detail is not None:
+            body["repo_detail"] = repo_detail.as_dict()
+        if share_name is not None:
+            body["share_name"] = share_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations",
+            body=body,
+            headers=headers,
+        )
         return Installation.from_dict(res)
 
     def delete(self, listing_id: str, installation_id: str):
         """Uninstall from a listing.
-        
+
         Uninstall an installation associated with a Databricks Marketplace listing.
-        
+
         :param listing_id: str
         :param installation_id: str
-        
-        
-        """
 
-        headers = {'Accept': 'application/json', }
 
-        self._api.do('DELETE',
-                     f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}',
-                     headers=headers)
+        """
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[InstallationDetail]:
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}",
+            headers=headers,
+        )
+
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[InstallationDetail]:
         """List all installations.
-        
+
         List all installations across all listings.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`InstallationDetail`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/marketplace-consumer/installations',
-                                query=query,
-                                headers=headers)
-            if 'installations' in json:
-                for v in json['installations']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/marketplace-consumer/installations",
+                query=query,
+                headers=headers,
+            )
+            if "installations" in json:
+                for v in json["installations"]:
                     yield InstallationDetail.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_listing_installations(self,
-                                   listing_id: str,
-                                   *,
-                                   page_size: Optional[int] = None,
-                                   page_token: Optional[str] = None) -> Iterator[InstallationDetail]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_listing_installations(
+        self,
+        listing_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[InstallationDetail]:
         """List installations for a listing.
-        
+
         List all installations for a particular listing.
-        
+
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`InstallationDetail`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations',
-                                query=query,
-                                headers=headers)
-            if 'installations' in json:
-                for v in json['installations']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations",
+                query=query,
+                headers=headers,
+            )
+            if "installations" in json:
+                for v in json["installations"]:
                     yield InstallationDetail.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               listing_id: str,
-               installation_id: str,
-               installation: InstallationDetail,
-               *,
-               rotate_token: Optional[bool] = None) -> UpdateInstallationResponse:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        listing_id: str,
+        installation_id: str,
+        installation: InstallationDetail,
+        *,
+        rotate_token: Optional[bool] = None,
+    ) -> UpdateInstallationResponse:
         """Update an installation.
-        
+
         This is an update API that updates the fields defined in the installation table and interacts with
         external services for the fields not included in that table: 1. the token will be rotated if the
         rotateToken flag is true; 2. the token will be forcibly rotated if the rotateToken flag is true and
         the tokenInfo field is empty.
-        
+
         :param listing_id: str
         :param installation_id: str
         :param installation: :class:`InstallationDetail`
         :param rotate_token: bool (optional)
-        
+
         :returns: :class:`UpdateInstallationResponse`
         """
         body = {}
-        if installation is not None: body['installation'] = installation.as_dict()
-        if rotate_token is not None: body['rotate_token'] = rotate_token
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if installation is not None:
+            body["installation"] = installation.as_dict()
+        if rotate_token is not None:
+            body["rotate_token"] = rotate_token
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PUT',
-            f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}',
+            "PUT",
+            f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return UpdateInstallationResponse.from_dict(res)
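
# Usage sketch, not part of the patch, for the install-then-update flow above.
# Assumes `w.consumer_installations` (attribute name assumed) and that the returned
# Installation wraps an InstallationDetail in its `installation` field (field name assumed).
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
inst = w.consumer_installations.create(
    listing_id="<listing-id>",
    catalog_name="main",
    share_name="<share-name>",
)
detail = inst.installation
w.consumer_installations.update(
    listing_id="<listing-id>",
    installation_id=detail.id,  # id field assumed on InstallationDetail
    installation=detail,
    rotate_token=True,  # triggers the token rotation behaviour described in the docstring
)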
 
 
@@ -3212,54 +3860,67 @@ def __init__(self, api_client):
 
     def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetListingsResponse:
         """Get one batch of listings. One may specify up to 50 IDs per request.
-        
+
         Batch get a published listing in the Databricks Marketplace that the consumer has access to.
-        
+
         :param ids: List[str] (optional)
-        
+
         :returns: :class:`BatchGetListingsResponse`
         """
 
         query = {}
-        if ids is not None: query['ids'] = [v for v in ids]
-        headers = {'Accept': 'application/json', }
+        if ids is not None:
+            query["ids"] = [v for v in ids]
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.1/marketplace-consumer/listings:batchGet',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.1/marketplace-consumer/listings:batchGet",
+            query=query,
+            headers=headers,
+        )
         return BatchGetListingsResponse.from_dict(res)
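
# Usage sketch, not part of the patch: batch-fetching up to 50 listings by id.
# Assumes `w.consumer_listings` (attribute name assumed) and that the response
# exposes a `listings` field (field name assumed).
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.consumer_listings.batch_get(ids=["<listing-id-1>", "<listing-id-2>"])
for listing in resp.listings or []:
    print(listing.id)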
 
     def get(self, id: str) -> GetListingResponse:
         """Get listing.
-        
+
         Get a published listing in the Databricks Marketplace that the consumer has access to.
-        
+
         :param id: str
-        
+
         :returns: :class:`GetListingResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/marketplace-consumer/listings/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/marketplace-consumer/listings/{id}",
+            headers=headers,
+        )
         return GetListingResponse.from_dict(res)
 
-    def list(self,
-             *,
-             assets: Optional[List[AssetType]] = None,
-             categories: Optional[List[Category]] = None,
-             is_free: Optional[bool] = None,
-             is_private_exchange: Optional[bool] = None,
-             is_staff_pick: Optional[bool] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None,
-             provider_ids: Optional[List[str]] = None,
-             tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]:
+    def list(
+        self,
+        *,
+        assets: Optional[List[AssetType]] = None,
+        categories: Optional[List[Category]] = None,
+        is_free: Optional[bool] = None,
+        is_private_exchange: Optional[bool] = None,
+        is_staff_pick: Optional[bool] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        provider_ids: Optional[List[str]] = None,
+        tags: Optional[List[ListingTag]] = None,
+    ) -> Iterator[Listing]:
         """List listings.
-        
+
         List all published listings in the Databricks Marketplace that the consumer has access to.
-        
+
         :param assets: List[:class:`AssetType`] (optional)
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
@@ -3276,46 +3937,64 @@ def list(self,
           Matches any of the following provider ids
         :param tags: List[:class:`ListingTag`] (optional)
           Matches any of the following tags
-        
+
         :returns: Iterator over :class:`Listing`
         """
 
         query = {}
-        if assets is not None: query['assets'] = [v.value for v in assets]
-        if categories is not None: query['categories'] = [v.value for v in categories]
-        if is_free is not None: query['is_free'] = is_free
-        if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
-        if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
-        if tags is not None: query['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', }
+        if assets is not None:
+            query["assets"] = [v.value for v in assets]
+        if categories is not None:
+            query["categories"] = [v.value for v in categories]
+        if is_free is not None:
+            query["is_free"] = is_free
+        if is_private_exchange is not None:
+            query["is_private_exchange"] = is_private_exchange
+        if is_staff_pick is not None:
+            query["is_staff_pick"] = is_staff_pick
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if provider_ids is not None:
+            query["provider_ids"] = [v for v in provider_ids]
+        if tags is not None:
+            query["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.1/marketplace-consumer/listings', query=query, headers=headers)
-            if 'listings' in json:
-                for v in json['listings']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/marketplace-consumer/listings",
+                query=query,
+                headers=headers,
+            )
+            if "listings" in json:
+                for v in json["listings"]:
                     yield Listing.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def search(self,
-               query: str,
-               *,
-               assets: Optional[List[AssetType]] = None,
-               categories: Optional[List[Category]] = None,
-               is_free: Optional[bool] = None,
-               is_private_exchange: Optional[bool] = None,
-               page_size: Optional[int] = None,
-               page_token: Optional[str] = None,
-               provider_ids: Optional[List[str]] = None) -> Iterator[Listing]:
+            query["page_token"] = json["next_page_token"]
+
+    def search(
+        self,
+        query: str,
+        *,
+        assets: Optional[List[AssetType]] = None,
+        categories: Optional[List[Category]] = None,
+        is_free: Optional[bool] = None,
+        is_private_exchange: Optional[bool] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        provider_ids: Optional[List[str]] = None,
+    ) -> Iterator[Listing]:
         """Search listings.
-        
+
         Search published listings in the Databricks Marketplace that the consumer has access to. This query
         supports a variety of search parameters and performs fuzzy matching.
-        
+
         :param query: str
           Fuzzy matches query
         :param assets: List[:class:`AssetType`] (optional)
@@ -3328,32 +4007,44 @@ def search(self,
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-        
+
         :returns: Iterator over :class:`Listing`
         """
 
         query = {}
-        if assets is not None: query['assets'] = [v.value for v in assets]
-        if categories is not None: query['categories'] = [v.value for v in categories]
-        if is_free is not None: query['is_free'] = is_free
-        if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
-        if query is not None: query['query'] = query
-        headers = {'Accept': 'application/json', }
+        if assets is not None:
+            query["assets"] = [v.value for v in assets]
+        if categories is not None:
+            query["categories"] = [v.value for v in categories]
+        if is_free is not None:
+            query["is_free"] = is_free
+        if is_private_exchange is not None:
+            query["is_private_exchange"] = is_private_exchange
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if provider_ids is not None:
+            query["provider_ids"] = [v for v in provider_ids]
+        if query is not None:
+            query["query"] = query
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/marketplace-consumer/search-listings',
-                                query=query,
-                                headers=headers)
-            if 'listings' in json:
-                for v in json['listings']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/marketplace-consumer/search-listings",
+                query=query,
+                headers=headers,
+            )
+            if "listings" in json:
+                for v in json["listings"]:
                     yield Listing.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
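
# Usage sketch, not part of the patch, for the fuzzy search above.
# Assumes `w.consumer_listings` (attribute name assumed).
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for listing in w.consumer_listings.search(query="weather", is_free=True, page_size=25):
    print(listing.id)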
 
 
 class ConsumerPersonalizationRequestsAPI:
@@ -3363,22 +4054,22 @@ def __init__(self, api_client):
         self._api = api_client
 
     def create(
-            self,
-            listing_id: str,
-            intended_use: str,
-            accepted_consumer_terms: ConsumerTerms,
-            *,
-            comment: Optional[str] = None,
-            company: Optional[str] = None,
-            first_name: Optional[str] = None,
-            is_from_lighthouse: Optional[bool] = None,
-            last_name: Optional[str] = None,
-            recipient_type: Optional[DeltaSharingRecipientType] = None
+        self,
+        listing_id: str,
+        intended_use: str,
+        accepted_consumer_terms: ConsumerTerms,
+        *,
+        comment: Optional[str] = None,
+        company: Optional[str] = None,
+        first_name: Optional[str] = None,
+        is_from_lighthouse: Optional[bool] = None,
+        last_name: Optional[str] = None,
+        recipient_type: Optional[DeltaSharingRecipientType] = None,
     ) -> CreatePersonalizationRequestResponse:
         """Create a personalization request.
-        
+
         Create a personalization request for a listing.
-        
+
         :param listing_id: str
         :param intended_use: str
         :param accepted_consumer_terms: :class:`ConsumerTerms`
@@ -3388,75 +4079,99 @@ def create(
         :param is_from_lighthouse: bool (optional)
         :param last_name: str (optional)
         :param recipient_type: :class:`DeltaSharingRecipientType` (optional)
-        
+
         :returns: :class:`CreatePersonalizationRequestResponse`
         """
         body = {}
         if accepted_consumer_terms is not None:
-            body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict()
-        if comment is not None: body['comment'] = comment
-        if company is not None: body['company'] = company
-        if first_name is not None: body['first_name'] = first_name
-        if intended_use is not None: body['intended_use'] = intended_use
-        if is_from_lighthouse is not None: body['is_from_lighthouse'] = is_from_lighthouse
-        if last_name is not None: body['last_name'] = last_name
-        if recipient_type is not None: body['recipient_type'] = recipient_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests',
-                           body=body,
-                           headers=headers)
+            body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict()
+        if comment is not None:
+            body["comment"] = comment
+        if company is not None:
+            body["company"] = company
+        if first_name is not None:
+            body["first_name"] = first_name
+        if intended_use is not None:
+            body["intended_use"] = intended_use
+        if is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = is_from_lighthouse
+        if last_name is not None:
+            body["last_name"] = last_name
+        if recipient_type is not None:
+            body["recipient_type"] = recipient_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests",
+            body=body,
+            headers=headers,
+        )
         return CreatePersonalizationRequestResponse.from_dict(res)
 
     def get(self, listing_id: str) -> GetPersonalizationRequestResponse:
         """Get the personalization request for a listing.
-        
+
         Get the personalization request for a listing. Each consumer can make at *most* one personalization
         request for a listing.
-        
+
         :param listing_id: str
-        
+
         :returns: :class:`GetPersonalizationRequestResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests",
+            headers=headers,
+        )
         return GetPersonalizationRequestResponse.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PersonalizationRequest]:
         """List all personalization requests.
-        
+
         List personalization requests for a consumer across all listings.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`PersonalizationRequest`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/marketplace-consumer/personalization-requests',
-                                query=query,
-                                headers=headers)
-            if 'personalization_requests' in json:
-                for v in json['personalization_requests']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/marketplace-consumer/personalization-requests",
+                query=query,
+                headers=headers,
+            )
+            if "personalization_requests" in json:
+                for v in json["personalization_requests"]:
                     yield PersonalizationRequest.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
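The generated `list` method above returns a lazy iterator: it pages through the endpoint by following `next_page_token` internally, so callers never handle the token themselves. A minimal consumption sketch, assuming a configured `WorkspaceClient` that exposes this service as `w.consumer_personalization_requests` and that `PersonalizationRequest` carries `listing_id`/`status` fields (the accessor and field names are assumptions, not shown in this hunk):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # credentials resolved from the environment / .databrickscfg

    # page_size only bounds each underlying HTTP response; the iterator keeps
    # requesting pages until next_page_token comes back empty.
    for req in w.consumer_personalization_requests.list(page_size=20):
        print(req.listing_id, req.status)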
 
 
 class ConsumerProvidersAPI:
@@ -3467,72 +4182,92 @@ def __init__(self, api_client):
 
     def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetProvidersResponse:
         """Get one batch of providers. One may specify up to 50 IDs per request.
-        
+
         Batch get a provider in the Databricks Marketplace with at least one visible listing.
-        
+
         :param ids: List[str] (optional)
-        
+
         :returns: :class:`BatchGetProvidersResponse`
         """
 
         query = {}
-        if ids is not None: query['ids'] = [v for v in ids]
-        headers = {'Accept': 'application/json', }
+        if ids is not None:
+            query["ids"] = [v for v in ids]
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.1/marketplace-consumer/providers:batchGet',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.1/marketplace-consumer/providers:batchGet",
+            query=query,
+            headers=headers,
+        )
         return BatchGetProvidersResponse.from_dict(res)
 
     def get(self, id: str) -> GetProviderResponse:
         """Get a provider.
-        
+
         Get a provider in the Databricks Marketplace with at least one visible listing.
-        
+
         :param id: str
-        
+
         :returns: :class:`GetProviderResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/marketplace-consumer/providers/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/marketplace-consumer/providers/{id}",
+            headers=headers,
+        )
         return GetProviderResponse.from_dict(res)
 
-    def list(self,
-             *,
-             is_featured: Optional[bool] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ProviderInfo]:
+    def list(
+        self,
+        *,
+        is_featured: Optional[bool] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ProviderInfo]:
         """List providers.
-        
+
         List all providers in the Databricks Marketplace with at least one visible listing.
-        
+
         :param is_featured: bool (optional)
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ProviderInfo`
         """
 
         query = {}
-        if is_featured is not None: query['is_featured'] = is_featured
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if is_featured is not None:
+            query["is_featured"] = is_featured
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.1/marketplace-consumer/providers',
-                                query=query,
-                                headers=headers)
-            if 'providers' in json:
-                for v in json['providers']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/marketplace-consumer/providers",
+                query=query,
+                headers=headers,
+            )
+            if "providers" in json:
+                for v in json["providers"]:
                     yield ProviderInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
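As a usage note for the two read paths above: `batch_get` accepts up to 50 IDs per call and returns them in one response, while `list` streams every visible provider and pushes the `is_featured` filter to the server. A sketch, assuming the client exposes this service as `w.consumer_providers` and that `BatchGetProvidersResponse.providers` and `ProviderInfo.name` exist (assumed names):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Chunk larger ID lists on the caller side; the endpoint caps a batch at 50 IDs.
    resp = w.consumer_providers.batch_get(ids=["1234", "5678"])  # hypothetical IDs
    providers = resp.providers or []  # assumed response field

    # Server-side filter: is_featured is forwarded as a query parameter.
    featured_names = [p.name for p in w.consumer_providers.list(is_featured=True)]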
 
 
 class ProviderExchangeFiltersAPI:
@@ -3543,80 +4278,116 @@ def __init__(self, api_client):
 
     def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse:
         """Create a new exchange filter.
-        
+
         Add an exchange filter.
-        
+
         :param filter: :class:`ExchangeFilter`
-        
+
         :returns: :class:`CreateExchangeFilterResponse`
         """
         body = {}
-        if filter is not None: body['filter'] = filter.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if filter is not None:
+            body["filter"] = filter.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-exchange/filters', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-exchange/filters",
+            body=body,
+            headers=headers,
+        )
         return CreateExchangeFilterResponse.from_dict(res)
 
     def delete(self, id: str):
         """Delete an exchange filter.
-        
+
         Delete an exchange filter
-        
+
         :param id: str
-        
-        
-        """
 
-        headers = {'Accept': 'application/json', }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-exchange/filters/{id}', headers=headers)
+        """
 
-    def list(self,
-             exchange_id: str,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ExchangeFilter]:
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-exchange/filters/{id}",
+            headers=headers,
+        )
+
+    def list(
+        self,
+        exchange_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ExchangeFilter]:
         """List exchange filters.
-        
+
         List exchange filter
-        
+
         :param exchange_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ExchangeFilter`
         """
 
         query = {}
-        if exchange_id is not None: query['exchange_id'] = exchange_id
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if exchange_id is not None:
+            query["exchange_id"] = exchange_id
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/marketplace-exchange/filters', query=query, headers=headers)
-            if 'filters' in json:
-                for v in json['filters']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-exchange/filters",
+                query=query,
+                headers=headers,
+            )
+            if "filters" in json:
+                for v in json["filters"]:
                     yield ExchangeFilter.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse:
         """Update exchange filter.
-        
+
         Update an exchange filter.
-        
+
         :param id: str
         :param filter: :class:`ExchangeFilter`
-        
+
         :returns: :class:`UpdateExchangeFilterResponse`
         """
         body = {}
-        if filter is not None: body['filter'] = filter.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if filter is not None:
+            body["filter"] = filter.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', f'/api/2.0/marketplace-exchange/filters/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/marketplace-exchange/filters/{id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateExchangeFilterResponse.from_dict(res)
 
 
@@ -3628,198 +4399,267 @@ def __init__(self, api_client):
 
     def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExchangeForListingResponse:
         """Add an exchange for listing.
-        
+
         Associate an exchange with a listing
-        
+
         :param listing_id: str
         :param exchange_id: str
-        
+
         :returns: :class:`AddExchangeForListingResponse`
         """
         body = {}
-        if exchange_id is not None: body['exchange_id'] = exchange_id
-        if listing_id is not None: body['listing_id'] = listing_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if exchange_id is not None:
+            body["exchange_id"] = exchange_id
+        if listing_id is not None:
+            body["listing_id"] = listing_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           '/api/2.0/marketplace-exchange/exchanges-for-listing',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-exchange/exchanges-for-listing",
+            body=body,
+            headers=headers,
+        )
         return AddExchangeForListingResponse.from_dict(res)
 
     def create(self, exchange: Exchange) -> CreateExchangeResponse:
         """Create an exchange.
-        
+
         Create an exchange
-        
+
         :param exchange: :class:`Exchange`
-        
+
         :returns: :class:`CreateExchangeResponse`
         """
         body = {}
-        if exchange is not None: body['exchange'] = exchange.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if exchange is not None:
+            body["exchange"] = exchange.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-exchange/exchanges', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-exchange/exchanges",
+            body=body,
+            headers=headers,
+        )
         return CreateExchangeResponse.from_dict(res)
 
     def delete(self, id: str):
         """Delete an exchange.
-        
+
         This removes a listing from marketplace.
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-exchange/exchanges/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-exchange/exchanges/{id}",
+            headers=headers,
+        )
 
     def delete_listing_from_exchange(self, id: str):
         """Remove an exchange for listing.
-        
+
         Disassociate an exchange from a listing
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-exchange/exchanges-for-listing/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-exchange/exchanges-for-listing/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> GetExchangeResponse:
         """Get an exchange.
-        
+
         Get an exchange.
-        
+
         :param id: str
-        
+
         :returns: :class:`GetExchangeResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/marketplace-exchange/exchanges/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/marketplace-exchange/exchanges/{id}",
+            headers=headers,
+        )
         return GetExchangeResponse.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[Exchange]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Exchange]:
         """List exchanges.
-        
+
         List exchanges visible to provider
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`Exchange`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/marketplace-exchange/exchanges',
-                                query=query,
-                                headers=headers)
-            if 'exchanges' in json:
-                for v in json['exchanges']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-exchange/exchanges",
+                query=query,
+                headers=headers,
+            )
+            if "exchanges" in json:
+                for v in json["exchanges"]:
                     yield Exchange.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_exchanges_for_listing(self,
-                                   listing_id: str,
-                                   *,
-                                   page_size: Optional[int] = None,
-                                   page_token: Optional[str] = None) -> Iterator[ExchangeListing]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_exchanges_for_listing(
+        self,
+        listing_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ExchangeListing]:
         """List exchanges for listing.
-        
+
         List exchanges associated with a listing
-        
+
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ExchangeListing`
         """
 
         query = {}
-        if listing_id is not None: query['listing_id'] = listing_id
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if listing_id is not None:
+            query["listing_id"] = listing_id
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/marketplace-exchange/exchanges-for-listing',
-                                query=query,
-                                headers=headers)
-            if 'exchange_listing' in json:
-                for v in json['exchange_listing']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-exchange/exchanges-for-listing",
+                query=query,
+                headers=headers,
+            )
+            if "exchange_listing" in json:
+                for v in json["exchange_listing"]:
                     yield ExchangeListing.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_listings_for_exchange(self,
-                                   exchange_id: str,
-                                   *,
-                                   page_size: Optional[int] = None,
-                                   page_token: Optional[str] = None) -> Iterator[ExchangeListing]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_listings_for_exchange(
+        self,
+        exchange_id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ExchangeListing]:
         """List listings for exchange.
-        
+
         List listings associated with an exchange
-        
+
         :param exchange_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ExchangeListing`
         """
 
         query = {}
-        if exchange_id is not None: query['exchange_id'] = exchange_id
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if exchange_id is not None:
+            query["exchange_id"] = exchange_id
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/marketplace-exchange/listings-for-exchange',
-                                query=query,
-                                headers=headers)
-            if 'exchange_listings' in json:
-                for v in json['exchange_listings']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-exchange/listings-for-exchange",
+                query=query,
+                headers=headers,
+            )
+            if "exchange_listings" in json:
+                for v in json["exchange_listings"]:
                     yield ExchangeListing.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, id: str, exchange: Exchange) -> UpdateExchangeResponse:
         """Update exchange.
-        
+
         Update an exchange
-        
+
         :param id: str
         :param exchange: :class:`Exchange`
-        
+
         :returns: :class:`UpdateExchangeResponse`
         """
         body = {}
-        if exchange is not None: body['exchange'] = exchange.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if exchange is not None:
+            body["exchange"] = exchange.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', f'/api/2.0/marketplace-exchange/exchanges/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/marketplace-exchange/exchanges/{id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateExchangeResponse.from_dict(res)
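A short sketch tying the exchange/listing association methods together, assuming the client exposes this service as `w.provider_exchanges`; the IDs are placeholders and `ExchangeListing.listing_id` is an assumed field name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    listing_id = "..."   # an existing listing owned by this provider (placeholder)
    exchange_id = "..."  # an existing exchange (placeholder)

    # Associate the listing with the exchange, then enumerate the associations.
    w.provider_exchanges.add_listing_to_exchange(listing_id=listing_id, exchange_id=exchange_id)
    for el in w.provider_exchanges.list_listings_for_exchange(exchange_id=exchange_id):
        print(el.listing_id)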
 
 
@@ -3829,92 +4669,130 @@ class ProviderFilesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               file_parent: FileParent,
-               marketplace_file_type: MarketplaceFileType,
-               mime_type: str,
-               *,
-               display_name: Optional[str] = None) -> CreateFileResponse:
+    def create(
+        self,
+        file_parent: FileParent,
+        marketplace_file_type: MarketplaceFileType,
+        mime_type: str,
+        *,
+        display_name: Optional[str] = None,
+    ) -> CreateFileResponse:
         """Create a file.
-        
+
         Create a file. Currently, only provider icons and attached notebooks are supported.
-        
+
         :param file_parent: :class:`FileParent`
         :param marketplace_file_type: :class:`MarketplaceFileType`
         :param mime_type: str
         :param display_name: str (optional)
-        
+
         :returns: :class:`CreateFileResponse`
         """
         body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if file_parent is not None: body['file_parent'] = file_parent.as_dict()
-        if marketplace_file_type is not None: body['marketplace_file_type'] = marketplace_file_type.value
-        if mime_type is not None: body['mime_type'] = mime_type
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if display_name is not None:
+            body["display_name"] = display_name
+        if file_parent is not None:
+            body["file_parent"] = file_parent.as_dict()
+        if marketplace_file_type is not None:
+            body["marketplace_file_type"] = marketplace_file_type.value
+        if mime_type is not None:
+            body["mime_type"] = mime_type
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-provider/files', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-provider/files",
+            body=body,
+            headers=headers,
+        )
         return CreateFileResponse.from_dict(res)
 
     def delete(self, file_id: str):
         """Delete a file.
-        
+
         Delete a file
-        
+
         :param file_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-provider/files/{file_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-provider/files/{file_id}",
+            headers=headers,
+        )
 
     def get(self, file_id: str) -> GetFileResponse:
         """Get a file.
-        
+
         Get a file
-        
+
         :param file_id: str
-        
+
         :returns: :class:`GetFileResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/marketplace-provider/files/{file_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/marketplace-provider/files/{file_id}",
+            headers=headers,
+        )
         return GetFileResponse.from_dict(res)
 
-    def list(self,
-             file_parent: FileParent,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[FileInfo]:
+    def list(
+        self,
+        file_parent: FileParent,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[FileInfo]:
         """List files.
-        
+
         List files attached to a parent entity.
-        
+
         :param file_parent: :class:`FileParent`
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`FileInfo`
         """
 
         query = {}
-        if file_parent is not None: query['file_parent'] = file_parent.as_dict()
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if file_parent is not None:
+            query["file_parent"] = file_parent.as_dict()
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/marketplace-provider/files', query=query, headers=headers)
-            if 'file_infos' in json:
-                for v in json['file_infos']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-provider/files",
+                query=query,
+                headers=headers,
+            )
+            if "file_infos" in json:
+                for v in json["file_infos"]:
                     yield FileInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
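The `while True` loops above all terminate the same way: yield the current page, stop when `next_page_token` is absent or empty, otherwise carry the token into the next request. A self-contained mirror of that loop against a fake transport, just to make the termination condition explicit (everything below is illustrative):

    from typing import Dict, Iterator

    # Two fake pages keyed by page token ("" = first request, no token yet).
    _PAGES: Dict[str, dict] = {
        "": {"file_infos": [{"id": "f1"}, {"id": "f2"}], "next_page_token": "t1"},
        "t1": {"file_infos": [{"id": "f3"}]},  # no next_page_token -> last page
    }


    def fake_do(query: dict) -> dict:
        return _PAGES[query.get("page_token", "")]


    def list_files() -> Iterator[dict]:
        """Same shape as the generated loop: yield items, follow next_page_token, stop on empty."""
        query: dict = {}
        while True:
            json = fake_do(query)
            for v in json.get("file_infos", []):
                yield v
            if "next_page_token" not in json or not json["next_page_token"]:
                return
            query["page_token"] = json["next_page_token"]


    assert [f["id"] for f in list_files()] == ["f1", "f2", "f3"]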
 
 
 class ProviderListingsAPI:
@@ -3926,89 +4804,133 @@ def __init__(self, api_client):
 
     def create(self, listing: Listing) -> CreateListingResponse:
         """Create a listing.
-        
+
         Create a new listing
-        
+
         :param listing: :class:`Listing`
-        
+
         :returns: :class:`CreateListingResponse`
         """
         body = {}
-        if listing is not None: body['listing'] = listing.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if listing is not None:
+            body["listing"] = listing.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-provider/listing', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-provider/listing",
+            body=body,
+            headers=headers,
+        )
         return CreateListingResponse.from_dict(res)
 
     def delete(self, id: str):
         """Delete a listing.
-        
+
         Delete a listing
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-provider/listings/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-provider/listings/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> GetListingResponse:
         """Get a listing.
-        
+
         Get a listing
-        
+
         :param id: str
-        
+
         :returns: :class:`GetListingResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/marketplace-provider/listings/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/marketplace-provider/listings/{id}",
+            headers=headers,
+        )
         return GetListingResponse.from_dict(res)
 
-    def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Listing]:
         """List listings.
-        
+
         List listings owned by this provider
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`Listing`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/marketplace-provider/listings', query=query, headers=headers)
-            if 'listings' in json:
-                for v in json['listings']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-provider/listings",
+                query=query,
+                headers=headers,
+            )
+            if "listings" in json:
+                for v in json["listings"]:
                     yield Listing.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, id: str, listing: Listing) -> UpdateListingResponse:
         """Update listing.
-        
+
         Update a listing
-        
+
         :param id: str
         :param listing: :class:`Listing`
-        
+
         :returns: :class:`UpdateListingResponse`
         """
         body = {}
-        if listing is not None: body['listing'] = listing.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if listing is not None:
+            body["listing"] = listing.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', f'/api/2.0/marketplace-provider/listings/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/marketplace-provider/listings/{id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateListingResponse.from_dict(res)
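`update` takes the full `Listing` object, so a read-modify-write flow is the natural way to use it: fetch the current object, mutate it, and send it back. A sketch, assuming `w.provider_listings` is the accessor and that `GetListingResponse` wraps the object in a `listing` field (assumed names):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    listing_id = "..."  # placeholder
    listing = w.provider_listings.get(id=listing_id).listing  # assumed response field

    # ...mutate fields on `listing` here, then write the whole object back:
    w.provider_listings.update(id=listing_id, listing=listing)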
 
 
@@ -4019,68 +4941,85 @@ class ProviderPersonalizationRequestsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PersonalizationRequest]:
         """All personalization requests across all listings.
-        
+
         List personalization requests to this provider. This will return all personalization requests,
         regardless of which listing they are for.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`PersonalizationRequest`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/marketplace-provider/personalization-requests',
-                                query=query,
-                                headers=headers)
-            if 'personalization_requests' in json:
-                for v in json['personalization_requests']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-provider/personalization-requests",
+                query=query,
+                headers=headers,
+            )
+            if "personalization_requests" in json:
+                for v in json["personalization_requests"]:
                     yield PersonalizationRequest.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               listing_id: str,
-               request_id: str,
-               status: PersonalizationRequestStatus,
-               *,
-               reason: Optional[str] = None,
-               share: Optional[ShareInfo] = None) -> UpdatePersonalizationRequestResponse:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        listing_id: str,
+        request_id: str,
+        status: PersonalizationRequestStatus,
+        *,
+        reason: Optional[str] = None,
+        share: Optional[ShareInfo] = None,
+    ) -> UpdatePersonalizationRequestResponse:
         """Update personalization request status.
-        
+
         Update personalization request. This method only permits updating the status of the request.
-        
+
         :param listing_id: str
         :param request_id: str
         :param status: :class:`PersonalizationRequestStatus`
         :param reason: str (optional)
         :param share: :class:`ShareInfo` (optional)
-        
+
         :returns: :class:`UpdatePersonalizationRequestResponse`
         """
         body = {}
-        if reason is not None: body['reason'] = reason
-        if share is not None: body['share'] = share.as_dict()
-        if status is not None: body['status'] = status.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if reason is not None:
+            body["reason"] = reason
+        if share is not None:
+            body["share"] = share.as_dict()
+        if status is not None:
+            body["status"] = status.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PUT',
-            f'/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status',
+            "PUT",
+            f"/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return UpdatePersonalizationRequestResponse.from_dict(res)
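Per the docstring, only the request's status (plus an optional reason or share) can be changed here. A sketch of approving a pending request, assuming `w.provider_personalization_requests` as the accessor; the `FULFILLED` member of `PersonalizationRequestStatus` is an assumption, and the IDs are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.marketplace import PersonalizationRequestStatus

    w = WorkspaceClient()

    w.provider_personalization_requests.update(
        listing_id="...",                               # placeholder
        request_id="...",                               # placeholder
        status=PersonalizationRequestStatus.FULFILLED,  # assumed enum member
        reason="granted after review",
    )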
 
 
@@ -4092,65 +5031,91 @@ def __init__(self, api_client):
 
     def create(self) -> ProviderAnalyticsDashboard:
         """Create provider analytics dashboard.
-        
+
         Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the
         Lakeview dashboard id.
-        
+
         :returns: :class:`ProviderAnalyticsDashboard`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-provider/analytics_dashboard', headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-provider/analytics_dashboard",
+            headers=headers,
+        )
         return ProviderAnalyticsDashboard.from_dict(res)
 
     def get(self) -> ListProviderAnalyticsDashboardResponse:
         """Get provider analytics dashboard.
-        
+
         Get provider analytics dashboard.
-        
+
         :returns: :class:`ListProviderAnalyticsDashboardResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/marketplace-provider/analytics_dashboard', headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/marketplace-provider/analytics_dashboard",
+            headers=headers,
+        )
         return ListProviderAnalyticsDashboardResponse.from_dict(res)
 
-    def get_latest_version(self) -> GetLatestVersionProviderAnalyticsDashboardResponse:
+    def get_latest_version(
+        self,
+    ) -> GetLatestVersionProviderAnalyticsDashboardResponse:
         """Get latest version of provider analytics dashboard.
-        
+
         Get latest version of provider analytics dashboard.
-        
+
         :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/marketplace-provider/analytics_dashboard/latest', headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/marketplace-provider/analytics_dashboard/latest",
+            headers=headers,
+        )
         return GetLatestVersionProviderAnalyticsDashboardResponse.from_dict(res)
 
     def update(self, id: str, *, version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse:
         """Update provider analytics dashboard.
-        
+
         Update provider analytics dashboard.
-        
+
         :param id: str
           id is immutable property and can't be updated.
         :param version: int (optional)
           this is the version of the dashboard template we want to update the user to; the current
           expectation is that it should be equal to the latest version of the dashboard template
-        
+
         :returns: :class:`UpdateProviderAnalyticsDashboardResponse`
         """
         body = {}
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/marketplace-provider/analytics_dashboard/{id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/marketplace-provider/analytics_dashboard/{id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateProviderAnalyticsDashboardResponse.from_dict(res)
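The dashboard `update` call is a version bump: pass the id returned at creation time and the template version to move the user to, which the docstring expects to match the latest template version. A sketch, assuming the accessor is `w.provider_provider_analytics_dashboards` and that the response objects expose `id` and `version` fields (assumed names):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    api = w.provider_provider_analytics_dashboards  # assumed accessor name

    dash = api.create()                # Marketplace-specific id, not the Lakeview dashboard id
    latest = api.get_latest_version()  # latest available template version

    api.update(id=dash.id, version=latest.version)  # assumed field names on both responses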
 
 
@@ -4162,93 +5127,131 @@ def __init__(self, api_client):
 
     def create(self, provider: ProviderInfo) -> CreateProviderResponse:
         """Create a provider.
-        
+
         Create a provider
-        
+
         :param provider: :class:`ProviderInfo`
-        
+
         :returns: :class:`CreateProviderResponse`
         """
         body = {}
-        if provider is not None: body['provider'] = provider.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if provider is not None:
+            body["provider"] = provider.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/marketplace-provider/provider', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/marketplace-provider/provider",
+            body=body,
+            headers=headers,
+        )
         return CreateProviderResponse.from_dict(res)
 
     def delete(self, id: str):
         """Delete provider.
-        
+
         Delete provider
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/marketplace-provider/providers/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/marketplace-provider/providers/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> GetProviderResponse:
         """Get provider.
-        
+
         Get provider profile
-        
+
         :param id: str
-        
+
         :returns: :class:`GetProviderResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/marketplace-provider/providers/{id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/marketplace-provider/providers/{id}",
+            headers=headers,
+        )
         return GetProviderResponse.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ProviderInfo]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ProviderInfo]:
         """List providers.
-        
+
         List provider profiles for account.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ProviderInfo`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/marketplace-provider/providers',
-                                query=query,
-                                headers=headers)
-            if 'providers' in json:
-                for v in json['providers']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/marketplace-provider/providers",
+                query=query,
+                headers=headers,
+            )
+            if "providers" in json:
+                for v in json["providers"]:
                     yield ProviderInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def update(self, id: str, provider: ProviderInfo) -> UpdateProviderResponse:
         """Update provider.
-        
+
         Update provider profile
-        
+
         :param id: str
         :param provider: :class:`ProviderInfo`
-        
+
         :returns: :class:`UpdateProviderResponse`
         """
         body = {}
-        if provider is not None: body['provider'] = provider.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if provider is not None:
+            body["provider"] = provider.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', f'/api/2.0/marketplace-provider/providers/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/marketplace-provider/providers/{id}",
+            body=body,
+            headers=headers,
+        )
         return UpdateProviderResponse.from_dict(res)
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index e551c72ca..385641d4d 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -78,82 +78,100 @@ class Activity:
     def as_dict(self) -> dict:
         """Serializes the Activity into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activity_type is not None: body['activity_type'] = self.activity_type.value
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.from_stage is not None: body['from_stage'] = self.from_stage.value
-        if self.id is not None: body['id'] = self.id
+        if self.activity_type is not None:
+            body["activity_type"] = self.activity_type.value
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.from_stage is not None:
+            body["from_stage"] = self.from_stage.value
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.system_comment is not None: body['system_comment'] = self.system_comment
-        if self.to_stage is not None: body['to_stage'] = self.to_stage.value
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.system_comment is not None:
+            body["system_comment"] = self.system_comment
+        if self.to_stage is not None:
+            body["to_stage"] = self.to_stage.value
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Activity into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activity_type is not None: body['activity_type'] = self.activity_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.from_stage is not None: body['from_stage'] = self.from_stage
-        if self.id is not None: body['id'] = self.id
+        if self.activity_type is not None:
+            body["activity_type"] = self.activity_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.from_stage is not None:
+            body["from_stage"] = self.from_stage
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.system_comment is not None: body['system_comment'] = self.system_comment
-        if self.to_stage is not None: body['to_stage'] = self.to_stage
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.system_comment is not None:
+            body["system_comment"] = self.system_comment
+        if self.to_stage is not None:
+            body["to_stage"] = self.to_stage
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Activity:
         """Deserializes the Activity from a dictionary."""
-        return cls(activity_type=_enum(d, 'activity_type', ActivityType),
-                   comment=d.get('comment', None),
-                   creation_timestamp=d.get('creation_timestamp', None),
-                   from_stage=_enum(d, 'from_stage', Stage),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   system_comment=d.get('system_comment', None),
-                   to_stage=_enum(d, 'to_stage', Stage),
-                   user_id=d.get('user_id', None))
+        return cls(
+            activity_type=_enum(d, "activity_type", ActivityType),
+            comment=d.get("comment", None),
+            creation_timestamp=d.get("creation_timestamp", None),
+            from_stage=_enum(d, "from_stage", Stage),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            system_comment=d.get("system_comment", None),
+            to_stage=_enum(d, "to_stage", Stage),
+            user_id=d.get("user_id", None),
+        )
 
 
 class ActivityAction(Enum):
     """An action that a user (with sufficient permissions) could take on an activity. Valid values are:
     * `APPROVE_TRANSITION_REQUEST`: Approve a transition request
-    
+
     * `REJECT_TRANSITION_REQUEST`: Reject a transition request
-    
+
     * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request"""
 
-    APPROVE_TRANSITION_REQUEST = 'APPROVE_TRANSITION_REQUEST'
-    CANCEL_TRANSITION_REQUEST = 'CANCEL_TRANSITION_REQUEST'
-    REJECT_TRANSITION_REQUEST = 'REJECT_TRANSITION_REQUEST'
+    APPROVE_TRANSITION_REQUEST = "APPROVE_TRANSITION_REQUEST"
+    CANCEL_TRANSITION_REQUEST = "CANCEL_TRANSITION_REQUEST"
+    REJECT_TRANSITION_REQUEST = "REJECT_TRANSITION_REQUEST"
 
 
 class ActivityType(Enum):
     """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage
     transition.
-    
+
     * `REQUESTED_TRANSITION`: User requested the corresponding stage transition.
-    
+
     * `CANCELLED_REQUEST`: User cancelled an existing transition request.
-    
+
     * `APPROVED_REQUEST`: User approved the corresponding stage transition.
-    
+
     * `REJECTED_REQUEST`: User rejected the corresponding stage transition.
-    
+
     * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model
     versions in a stage."""
 
-    APPLIED_TRANSITION = 'APPLIED_TRANSITION'
-    APPROVED_REQUEST = 'APPROVED_REQUEST'
-    CANCELLED_REQUEST = 'CANCELLED_REQUEST'
-    NEW_COMMENT = 'NEW_COMMENT'
-    REJECTED_REQUEST = 'REJECTED_REQUEST'
-    REQUESTED_TRANSITION = 'REQUESTED_TRANSITION'
-    SYSTEM_TRANSITION = 'SYSTEM_TRANSITION'
+    APPLIED_TRANSITION = "APPLIED_TRANSITION"
+    APPROVED_REQUEST = "APPROVED_REQUEST"
+    CANCELLED_REQUEST = "CANCELLED_REQUEST"
+    NEW_COMMENT = "NEW_COMMENT"
+    REJECTED_REQUEST = "REJECTED_REQUEST"
+    REQUESTED_TRANSITION = "REQUESTED_TRANSITION"
+    SYSTEM_TRANSITION = "SYSTEM_TRANSITION"
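The reformatted serializers above round-trip cleanly: `from_dict` coerces raw enum strings back onto members via `_enum`, and `as_dict` drops fields that are still `None`. A small sketch, assuming all `Activity` fields are optional, as the generated dataclass suggests:

    from databricks.sdk.service.ml import Activity, ActivityType

    raw = {
        "activity_type": "APPLIED_TRANSITION",
        "comment": "promoted after offline evaluation",
        "user_id": "someone@example.com",
    }

    activity = Activity.from_dict(raw)
    assert activity.activity_type is ActivityType.APPLIED_TRANSITION  # string coerced to the enum member

    # Unset fields stay out of the serialized body, so the round trip is exact.
    assert activity.as_dict() == raw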
 
 
 @dataclass
@@ -185,32 +203,42 @@ def as_dict(self) -> dict:
         """Serializes the ApproveTransitionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.archive_existing_versions is not None:
-            body['archive_existing_versions'] = self.archive_existing_versions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage.value
-        if self.version is not None: body['version'] = self.version
+            body["archive_existing_versions"] = self.archive_existing_versions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.archive_existing_versions is not None:
-            body['archive_existing_versions'] = self.archive_existing_versions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage
-        if self.version is not None: body['version'] = self.version
+            body["archive_existing_versions"] = self.archive_existing_versions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequest:
         """Deserializes the ApproveTransitionRequest from a dictionary."""
-        return cls(archive_existing_versions=d.get('archive_existing_versions', None),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   stage=_enum(d, 'stage', Stage),
-                   version=d.get('version', None))
+        return cls(
+            archive_existing_versions=d.get("archive_existing_versions", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            stage=_enum(d, "stage", Stage),
+            version=d.get("version", None),
+        )
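As a hedged sketch of the difference between the two serializers above, assuming Stage.STAGING exists with the value "Staging" (mirroring the stage enums elsewhere in this file) and that all five fields are accepted as keyword arguments:

    from databricks.sdk.service.ml import ApproveTransitionRequest, Stage

    req = ApproveTransitionRequest(
        name="my-model",
        version="1",
        stage=Stage.STAGING,
        archive_existing_versions=False,
        comment="approved by the on-call reviewer",
    )

    deep = req.as_dict()             # JSON-ready: the enum is flattened to its string value
    shallow = req.as_shallow_dict()  # keeps the enum member itself
    assert deep["stage"] == "Staging"
    assert shallow["stage"] is Stage.STAGING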
 
 
 @dataclass
@@ -221,29 +249,31 @@ class ApproveTransitionRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the ApproveTransitionRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activity: body['activity'] = self.activity.as_dict()
+        if self.activity:
+            body["activity"] = self.activity.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activity: body['activity'] = self.activity
+        if self.activity:
+            body["activity"] = self.activity
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequestResponse:
         """Deserializes the ApproveTransitionRequestResponse from a dictionary."""
-        return cls(activity=_from_dict(d, 'activity', Activity))
+        return cls(activity=_from_dict(d, "activity", Activity))
 
 
 class CommentActivityAction(Enum):
     """An action that a user (with sufficient permissions) could take on a comment. Valid values are: *
     `EDIT_COMMENT`: Edit the comment
-    
+
     * `DELETE_COMMENT`: Delete the comment"""
 
-    DELETE_COMMENT = 'DELETE_COMMENT'
-    EDIT_COMMENT = 'EDIT_COMMENT'
+    DELETE_COMMENT = "DELETE_COMMENT"
+    EDIT_COMMENT = "EDIT_COMMENT"
 
 
 @dataclass
@@ -271,36 +301,48 @@ class CommentObject:
     def as_dict(self) -> dict:
         """Serializes the CommentObject into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.id is not None: body['id'] = self.id
+        if self.available_actions:
+            body["available_actions"] = [v.value for v in self.available_actions]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CommentObject into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.available_actions: body['available_actions'] = self.available_actions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.id is not None: body['id'] = self.id
+        if self.available_actions:
+            body["available_actions"] = self.available_actions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommentObject:
         """Deserializes the CommentObject from a dictionary."""
-        return cls(available_actions=_repeated_enum(d, 'available_actions', CommentActivityAction),
-                   comment=d.get('comment', None),
-                   creation_timestamp=d.get('creation_timestamp', None),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   user_id=d.get('user_id', None))
+        return cls(
+            available_actions=_repeated_enum(d, "available_actions", CommentActivityAction),
+            comment=d.get("comment", None),
+            creation_timestamp=d.get("creation_timestamp", None),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -317,23 +359,33 @@ class CreateComment:
     def as_dict(self) -> dict:
         """Serializes the CreateComment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateComment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateComment:
         """Deserializes the CreateComment from a dictionary."""
-        return cls(comment=d.get('comment', None), name=d.get('name', None), version=d.get('version', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -344,19 +396,21 @@ class CreateCommentResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateCommentResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment: body['comment'] = self.comment.as_dict()
+        if self.comment:
+            body["comment"] = self.comment.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment: body['comment'] = self.comment
+        if self.comment:
+            body["comment"] = self.comment
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCommentResponse:
         """Deserializes the CreateCommentResponse from a dictionary."""
-        return cls(comment=_from_dict(d, 'comment', CommentObject))
+        return cls(comment=_from_dict(d, "comment", CommentObject))
 
 
 @dataclass
@@ -377,25 +431,33 @@ class CreateExperiment:
     def as_dict(self) -> dict:
         """Serializes the CreateExperiment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.artifact_location is not None:
+            body["artifact_location"] = self.artifact_location
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = self.tags
+        if self.artifact_location is not None:
+            body["artifact_location"] = self.artifact_location
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperiment:
         """Deserializes the CreateExperiment from a dictionary."""
-        return cls(artifact_location=d.get('artifact_location', None),
-                   name=d.get('name', None),
-                   tags=_repeated_dict(d, 'tags', ExperimentTag))
+        return cls(
+            artifact_location=d.get("artifact_location", None),
+            name=d.get("name", None),
+            tags=_repeated_dict(d, "tags", ExperimentTag),
+        )
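A usage sketch for the request above, assuming ExperimentTag accepts key/value keyword arguments as its serializer later in this file suggests; as_dict expands nested tags through the list comprehension, and from_dict rebuilds them through _repeated_dict:

    from databricks.sdk.service.ml import CreateExperiment, ExperimentTag

    req = CreateExperiment(
        name="churn-model-experiments",
        artifact_location="dbfs:/mlflow/churn",
        tags=[ExperimentTag(key="team", value="growth")],
    )

    body = req.as_dict()
    # body["tags"] == [{"key": "team", "value": "growth"}]

    round_tripped = CreateExperiment.from_dict(body)
    assert round_tripped.tags[0].key == "team"  # tags rebuilt as ExperimentTag objects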
 
 
 @dataclass
@@ -406,19 +468,21 @@ class CreateExperimentResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateExperimentResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperimentResponse:
         """Deserializes the CreateExperimentResponse from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None))
+        return cls(experiment_id=d.get("experiment_id", None))
 
 
 @dataclass
@@ -435,25 +499,33 @@ class CreateModelRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = self.tags
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelRequest:
         """Deserializes the CreateModelRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   tags=_repeated_dict(d, 'tags', ModelTag))
+        return cls(
+            description=d.get("description", None),
+            name=d.get("name", None),
+            tags=_repeated_dict(d, "tags", ModelTag),
+        )
 
 
 @dataclass
@@ -463,19 +535,21 @@ class CreateModelResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateModelResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
+        if self.registered_model:
+            body["registered_model"] = self.registered_model.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.registered_model: body['registered_model'] = self.registered_model
+        if self.registered_model:
+            body["registered_model"] = self.registered_model
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelResponse:
         """Deserializes the CreateModelResponse from a dictionary."""
-        return cls(registered_model=_from_dict(d, 'registered_model', Model))
+        return cls(registered_model=_from_dict(d, "registered_model", Model))
 
 
 @dataclass
@@ -503,34 +577,48 @@ class CreateModelVersionRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateModelVersionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.tags: body['tags'] = self.tags
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionRequest:
         """Deserializes the CreateModelVersionRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   run_id=d.get('run_id', None),
-                   run_link=d.get('run_link', None),
-                   source=d.get('source', None),
-                   tags=_repeated_dict(d, 'tags', ModelVersionTag))
+        return cls(
+            description=d.get("description", None),
+            name=d.get("name", None),
+            run_id=d.get("run_id", None),
+            run_link=d.get("run_link", None),
+            source=d.get("source", None),
+            tags=_repeated_dict(d, "tags", ModelVersionTag),
+        )
 
 
 @dataclass
@@ -541,19 +629,21 @@ class CreateModelVersionResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version.as_dict()
+        if self.model_version:
+            body["model_version"] = self.model_version.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version
+        if self.model_version:
+            body["model_version"] = self.model_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionResponse:
         """Deserializes the CreateModelVersionResponse from a dictionary."""
-        return cls(model_version=_from_dict(d, 'model_version', ModelVersion))
+        return cls(model_version=_from_dict(d, "model_version", ModelVersion))
 
 
 @dataclass
@@ -610,34 +700,48 @@ class CreateRegistryWebhook:
     def as_dict(self) -> dict:
         """Serializes the CreateRegistryWebhook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = [v.value for v in self.events]
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict()
-        if self.job_spec: body['job_spec'] = self.job_spec.as_dict()
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.status is not None: body['status'] = self.status.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = [v.value for v in self.events]
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec.as_dict()
+        if self.job_spec:
+            body["job_spec"] = self.job_spec.as_dict()
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = self.events
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
-        if self.job_spec: body['job_spec'] = self.job_spec
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.status is not None: body['status'] = self.status
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = self.events
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec
+        if self.job_spec:
+            body["job_spec"] = self.job_spec
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegistryWebhook:
         """Deserializes the CreateRegistryWebhook from a dictionary."""
-        return cls(description=d.get('description', None),
-                   events=_repeated_enum(d, 'events', RegistryWebhookEvent),
-                   http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec),
-                   job_spec=_from_dict(d, 'job_spec', JobSpec),
-                   model_name=d.get('model_name', None),
-                   status=_enum(d, 'status', RegistryWebhookStatus))
+        return cls(
+            description=d.get("description", None),
+            events=_repeated_enum(d, "events", RegistryWebhookEvent),
+            http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec),
+            job_spec=_from_dict(d, "job_spec", JobSpec),
+            model_name=d.get("model_name", None),
+            status=_enum(d, "status", RegistryWebhookStatus),
+        )
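A deserialization sketch for the webhook request above; the MODEL_VERSION_CREATED event name and the url field of HttpUrlSpec are assumptions drawn from the registry webhook API rather than from this hunk, and _repeated_enum / _from_dict resolve them into enum members and a nested object:

    from databricks.sdk.service.ml import CreateRegistryWebhook, HttpUrlSpec, RegistryWebhookEvent

    hook = CreateRegistryWebhook.from_dict(
        {
            "model_name": "my-model",
            "events": ["MODEL_VERSION_CREATED"],  # assumed event name
            "http_url_spec": {"url": "https://example.com/mlflow-hook"},  # assumed field
        }
    )

    assert hook.events == [RegistryWebhookEvent.MODEL_VERSION_CREATED]
    assert isinstance(hook.http_url_spec, HttpUrlSpec)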
 
 
 @dataclass
@@ -658,28 +762,38 @@ class CreateRun:
     def as_dict(self) -> dict:
         """Serializes the CreateRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.tags: body['tags'] = self.tags
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.tags:
+            body["tags"] = self.tags
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRun:
         """Deserializes the CreateRun from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None),
-                   start_time=d.get('start_time', None),
-                   tags=_repeated_dict(d, 'tags', RunTag),
-                   user_id=d.get('user_id', None))
+        return cls(
+            experiment_id=d.get("experiment_id", None),
+            start_time=d.get("start_time", None),
+            tags=_repeated_dict(d, "tags", RunTag),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -690,19 +804,21 @@ class CreateRunResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run: body['run'] = self.run.as_dict()
+        if self.run:
+            body["run"] = self.run.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run: body['run'] = self.run
+        if self.run:
+            body["run"] = self.run
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRunResponse:
         """Deserializes the CreateRunResponse from a dictionary."""
-        return cls(run=_from_dict(d, 'run', Run))
+        return cls(run=_from_dict(d, "run", Run))
 
 
 @dataclass
@@ -730,28 +846,38 @@ class CreateTransitionRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateTransitionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage.value
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequest:
         """Deserializes the CreateTransitionRequest from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   stage=_enum(d, 'stage', Stage),
-                   version=d.get('version', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            stage=_enum(d, "stage", Stage),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -762,19 +888,21 @@ class CreateTransitionRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateTransitionRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.request: body['request'] = self.request.as_dict()
+        if self.request:
+            body["request"] = self.request.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.request: body['request'] = self.request
+        if self.request:
+            body["request"] = self.request
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequestResponse:
         """Deserializes the CreateTransitionRequestResponse from a dictionary."""
-        return cls(request=_from_dict(d, 'request', TransitionRequest))
+        return cls(request=_from_dict(d, "request", TransitionRequest))
 
 
 @dataclass
@@ -784,19 +912,21 @@ class CreateWebhookResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateWebhookResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.webhook: body['webhook'] = self.webhook.as_dict()
+        if self.webhook:
+            body["webhook"] = self.webhook.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateWebhookResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.webhook: body['webhook'] = self.webhook
+        if self.webhook:
+            body["webhook"] = self.webhook
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWebhookResponse:
         """Deserializes the CreateWebhookResponse from a dictionary."""
-        return cls(webhook=_from_dict(d, 'webhook', RegistryWebhook))
+        return cls(webhook=_from_dict(d, "webhook", RegistryWebhook))
 
 
 @dataclass
@@ -826,34 +956,48 @@ class Dataset:
     def as_dict(self) -> dict:
         """Serializes the Dataset into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.digest is not None: body['digest'] = self.digest
-        if self.name is not None: body['name'] = self.name
-        if self.profile is not None: body['profile'] = self.profile
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source
-        if self.source_type is not None: body['source_type'] = self.source_type
+        if self.digest is not None:
+            body["digest"] = self.digest
+        if self.name is not None:
+            body["name"] = self.name
+        if self.profile is not None:
+            body["profile"] = self.profile
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source
+        if self.source_type is not None:
+            body["source_type"] = self.source_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Dataset into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.digest is not None: body['digest'] = self.digest
-        if self.name is not None: body['name'] = self.name
-        if self.profile is not None: body['profile'] = self.profile
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source
-        if self.source_type is not None: body['source_type'] = self.source_type
+        if self.digest is not None:
+            body["digest"] = self.digest
+        if self.name is not None:
+            body["name"] = self.name
+        if self.profile is not None:
+            body["profile"] = self.profile
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source
+        if self.source_type is not None:
+            body["source_type"] = self.source_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dataset:
         """Deserializes the Dataset from a dictionary."""
-        return cls(digest=d.get('digest', None),
-                   name=d.get('name', None),
-                   profile=d.get('profile', None),
-                   schema=d.get('schema', None),
-                   source=d.get('source', None),
-                   source_type=d.get('source_type', None))
+        return cls(
+            digest=d.get("digest", None),
+            name=d.get("name", None),
+            profile=d.get("profile", None),
+            schema=d.get("schema", None),
+            source=d.get("source", None),
+            source_type=d.get("source_type", None),
+        )
 
 
 @dataclass
@@ -867,21 +1011,28 @@ class DatasetInput:
     def as_dict(self) -> dict:
         """Serializes the DatasetInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataset: body['dataset'] = self.dataset.as_dict()
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.dataset:
+            body["dataset"] = self.dataset.as_dict()
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DatasetInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataset: body['dataset'] = self.dataset
-        if self.tags: body['tags'] = self.tags
+        if self.dataset:
+            body["dataset"] = self.dataset
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatasetInput:
         """Deserializes the DatasetInput from a dictionary."""
-        return cls(dataset=_from_dict(d, 'dataset', Dataset), tags=_repeated_dict(d, 'tags', InputTag))
+        return cls(
+            dataset=_from_dict(d, "dataset", Dataset),
+            tags=_repeated_dict(d, "tags", InputTag),
+        )
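A sketch of round-tripping a run's dataset lineage through these helpers; field values are illustrative, tags are assumed to carry key/value pairs like the other tag types in this file, and from_dict is used so that any absent field simply defaults to None:

    from databricks.sdk.service.ml import Dataset, DatasetInput

    dataset_input = DatasetInput.from_dict(
        {
            "dataset": {
                "name": "train_split",
                "digest": "3f2c9a",
                "source_type": "delta_table",
                "source": "catalog.schema.training_data",
            },
            "tags": [{"key": "split", "value": "train"}],
        }
    )

    assert isinstance(dataset_input.dataset, Dataset)  # built by _from_dict
    assert dataset_input.tags[0].value == "train"      # built by _repeated_dict(InputTag)
    assert dataset_input.as_dict()["dataset"]["name"] == "train_split"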
 
 
 @dataclass
@@ -911,19 +1062,21 @@ class DeleteExperiment:
     def as_dict(self) -> dict:
         """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExperiment:
         """Deserializes the DeleteExperiment from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None))
+        return cls(experiment_id=d.get("experiment_id", None))
 
 
 @dataclass
@@ -1029,19 +1182,21 @@ class DeleteRun:
     def as_dict(self) -> dict:
         """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -1079,25 +1234,33 @@ class DeleteRuns:
     def as_dict(self) -> dict:
         """Serializes the DeleteRuns into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.max_runs is not None: body['max_runs'] = self.max_runs
-        if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.max_runs is not None:
+            body["max_runs"] = self.max_runs
+        if self.max_timestamp_millis is not None:
+            body["max_timestamp_millis"] = self.max_timestamp_millis
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.max_runs is not None: body['max_runs'] = self.max_runs
-        if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.max_runs is not None:
+            body["max_runs"] = self.max_runs
+        if self.max_timestamp_millis is not None:
+            body["max_timestamp_millis"] = self.max_timestamp_millis
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRuns:
         """Deserializes the DeleteRuns from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None),
-                   max_runs=d.get('max_runs', None),
-                   max_timestamp_millis=d.get('max_timestamp_millis', None))
+        return cls(
+            experiment_id=d.get("experiment_id", None),
+            max_runs=d.get("max_runs", None),
+            max_timestamp_millis=d.get("max_timestamp_millis", None),
+        )
 
 
 @dataclass
@@ -1108,19 +1271,21 @@ class DeleteRunsResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteRunsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
+        if self.runs_deleted is not None:
+            body["runs_deleted"] = self.runs_deleted
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
+        if self.runs_deleted is not None:
+            body["runs_deleted"] = self.runs_deleted
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunsResponse:
         """Deserializes the DeleteRunsResponse from a dictionary."""
-        return cls(runs_deleted=d.get('runs_deleted', None))
+        return cls(runs_deleted=d.get("runs_deleted", None))
 
 
 @dataclass
@@ -1134,21 +1299,25 @@ class DeleteTag:
     def as_dict(self) -> dict:
         """Serializes the DeleteTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTag:
         """Deserializes the DeleteTag from a dictionary."""
-        return cls(key=d.get('key', None), run_id=d.get('run_id', None))
+        return cls(key=d.get("key", None), run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -1191,10 +1360,10 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteTransitionRequestResponse:
 
 class DeleteTransitionRequestStage(Enum):
 
-    ARCHIVED = 'Archived'
-    NONE = 'None'
-    PRODUCTION = 'Production'
-    STAGING = 'Staging'
+    ARCHIVED = "Archived"
+    NONE = "None"
+    PRODUCTION = "Production"
+    STAGING = "Staging"
 
 
 @dataclass
@@ -1243,37 +1412,53 @@ class Experiment:
     def as_dict(self) -> dict:
         """Serializes the Experiment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.last_update_time is not None: body['last_update_time'] = self.last_update_time
-        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.artifact_location is not None:
+            body["artifact_location"] = self.artifact_location
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.last_update_time is not None:
+            body["last_update_time"] = self.last_update_time
+        if self.lifecycle_stage is not None:
+            body["lifecycle_stage"] = self.lifecycle_stage
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Experiment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.last_update_time is not None: body['last_update_time'] = self.last_update_time
-        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = self.tags
+        if self.artifact_location is not None:
+            body["artifact_location"] = self.artifact_location
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.last_update_time is not None:
+            body["last_update_time"] = self.last_update_time
+        if self.lifecycle_stage is not None:
+            body["lifecycle_stage"] = self.lifecycle_stage
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Experiment:
         """Deserializes the Experiment from a dictionary."""
-        return cls(artifact_location=d.get('artifact_location', None),
-                   creation_time=d.get('creation_time', None),
-                   experiment_id=d.get('experiment_id', None),
-                   last_update_time=d.get('last_update_time', None),
-                   lifecycle_stage=d.get('lifecycle_stage', None),
-                   name=d.get('name', None),
-                   tags=_repeated_dict(d, 'tags', ExperimentTag))
+        return cls(
+            artifact_location=d.get("artifact_location", None),
+            creation_time=d.get("creation_time", None),
+            experiment_id=d.get("experiment_id", None),
+            last_update_time=d.get("last_update_time", None),
+            lifecycle_stage=d.get("lifecycle_stage", None),
+            name=d.get("name", None),
+            tags=_repeated_dict(d, "tags", ExperimentTag),
+        )
 
 
 @dataclass
@@ -1293,30 +1478,38 @@ class ExperimentAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the ExperimentAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlRequest:
         """Deserializes the ExperimentAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ExperimentPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1339,33 +1532,43 @@ class ExperimentAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the ExperimentAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlResponse:
         """Deserializes the ExperimentAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ExperimentPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ExperimentPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1380,33 +1583,41 @@ class ExperimentPermission:
     def as_dict(self) -> dict:
         """Serializes the ExperimentPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermission:
         """Deserializes the ExperimentPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ExperimentPermissionLevel),
+        )
 
 
 class ExperimentPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_READ = 'CAN_READ'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_READ = "CAN_READ"
 
 
 @dataclass
@@ -1421,26 +1632,32 @@ def as_dict(self) -> dict:
         """Serializes the ExperimentPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissions:
         """Deserializes the ExperimentPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ExperimentAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
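Finally, a sketch of deserializing a full permissions object: the nested access-control entries flow through _repeated_dict into ExperimentAccessControlResponse and ExperimentPermission objects, and _enum resolves the permission level (the object id and user name below are placeholders):

    from databricks.sdk.service.ml import ExperimentPermissionLevel, ExperimentPermissions

    perms = ExperimentPermissions.from_dict(
        {
            "object_id": "/experiments/1234",
            "object_type": "mlflow-experiment",
            "access_control_list": [
                {
                    "user_name": "someone@example.com",
                    "all_permissions": [{"permission_level": "CAN_READ", "inherited": False}],
                }
            ],
        }
    )

    entry = perms.access_control_list[0]
    assert entry.user_name == "someone@example.com"
    assert entry.all_permissions[0].permission_level is ExperimentPermissionLevel.CAN_READ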
 
 
 @dataclass
@@ -1453,22 +1670,28 @@ class ExperimentPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the ExperimentPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsDescription:
         """Deserializes the ExperimentPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ExperimentPermissionLevel),
+        )
 
 
 @dataclass
@@ -1482,23 +1705,27 @@ def as_dict(self) -> dict:
         """Serializes the ExperimentPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsRequest:
         """Deserializes the ExperimentPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ExperimentAccessControlRequest),
-                   experiment_id=d.get('experiment_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlRequest),
+            experiment_id=d.get("experiment_id", None),
+        )
 
 
 @dataclass
@@ -1512,21 +1739,25 @@ class ExperimentTag:
     def as_dict(self) -> dict:
         """Serializes the ExperimentTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentTag:
         """Deserializes the ExperimentTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -1543,23 +1774,33 @@ class FileInfo:
     def as_dict(self) -> dict:
         """Serializes the FileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_dir is not None: body['is_dir'] = self.is_dir
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_dir is not None:
+            body["is_dir"] = self.is_dir
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_size is not None: body['file_size'] = self.file_size
-        if self.is_dir is not None: body['is_dir'] = self.is_dir
-        if self.path is not None: body['path'] = self.path
+        if self.file_size is not None:
+            body["file_size"] = self.file_size
+        if self.is_dir is not None:
+            body["is_dir"] = self.is_dir
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
-        return cls(file_size=d.get('file_size', None), is_dir=d.get('is_dir', None), path=d.get('path', None))
+        return cls(
+            file_size=d.get("file_size", None),
+            is_dir=d.get("is_dir", None),
+            path=d.get("path", None),
+        )
 
 
 @dataclass
@@ -1570,19 +1811,21 @@ class GetExperimentPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetExperimentPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentPermissionLevelsResponse:
         """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', ExperimentPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", ExperimentPermissionsDescription))
 
 
 @dataclass
@@ -1593,19 +1836,21 @@ class GetExperimentResponse:
     def as_dict(self) -> dict:
         """Serializes the GetExperimentResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment: body['experiment'] = self.experiment.as_dict()
+        if self.experiment:
+            body["experiment"] = self.experiment.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment: body['experiment'] = self.experiment
+        if self.experiment:
+            body["experiment"] = self.experiment
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentResponse:
         """Deserializes the GetExperimentResponse from a dictionary."""
-        return cls(experiment=_from_dict(d, 'experiment', Experiment))
+        return cls(experiment=_from_dict(d, "experiment", Experiment))
 
 
 @dataclass
@@ -1619,21 +1864,25 @@ class GetLatestVersionsRequest:
     def as_dict(self) -> dict:
         """Serializes the GetLatestVersionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.stages: body['stages'] = [v for v in self.stages]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stages:
+            body["stages"] = [v for v in self.stages]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.stages: body['stages'] = self.stages
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stages:
+            body["stages"] = self.stages
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsRequest:
         """Deserializes the GetLatestVersionsRequest from a dictionary."""
-        return cls(name=d.get('name', None), stages=d.get('stages', None))
+        return cls(name=d.get("name", None), stages=d.get("stages", None))
 
 
 @dataclass
@@ -1645,19 +1894,21 @@ class GetLatestVersionsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetLatestVersionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
+        if self.model_versions:
+            body["model_versions"] = [v.as_dict() for v in self.model_versions]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_versions: body['model_versions'] = self.model_versions
+        if self.model_versions:
+            body["model_versions"] = self.model_versions
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsResponse:
         """Deserializes the GetLatestVersionsResponse from a dictionary."""
-        return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion))
+        return cls(model_versions=_repeated_dict(d, "model_versions", ModelVersion))
 
 
 @dataclass
@@ -1671,22 +1922,28 @@ class GetMetricHistoryResponse:
     def as_dict(self) -> dict:
         """Serializes the GetMetricHistoryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.metrics:
+            body["metrics"] = [v.as_dict() for v in self.metrics]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metrics: body['metrics'] = self.metrics
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.metrics:
+            body["metrics"] = self.metrics
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetricHistoryResponse:
         """Deserializes the GetMetricHistoryResponse from a dictionary."""
-        return cls(metrics=_repeated_dict(d, 'metrics', Metric),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            metrics=_repeated_dict(d, "metrics", Metric),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -1697,20 +1954,20 @@ def as_dict(self) -> dict:
         """Serializes the GetModelResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.registered_model_databricks:
-            body['registered_model_databricks'] = self.registered_model_databricks.as_dict()
+            body["registered_model_databricks"] = self.registered_model_databricks.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.registered_model_databricks:
-            body['registered_model_databricks'] = self.registered_model_databricks
+            body["registered_model_databricks"] = self.registered_model_databricks
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelResponse:
         """Deserializes the GetModelResponse from a dictionary."""
-        return cls(registered_model_databricks=_from_dict(d, 'registered_model_databricks', ModelDatabricks))
+        return cls(registered_model_databricks=_from_dict(d, "registered_model_databricks", ModelDatabricks))
 
 
 @dataclass
@@ -1721,19 +1978,21 @@ class GetModelVersionDownloadUriResponse:
     def as_dict(self) -> dict:
         """Serializes the GetModelVersionDownloadUriResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        if self.artifact_uri is not None:
+            body["artifact_uri"] = self.artifact_uri
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        if self.artifact_uri is not None:
+            body["artifact_uri"] = self.artifact_uri
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionDownloadUriResponse:
         """Deserializes the GetModelVersionDownloadUriResponse from a dictionary."""
-        return cls(artifact_uri=d.get('artifact_uri', None))
+        return cls(artifact_uri=d.get("artifact_uri", None))
 
 
 @dataclass
@@ -1743,19 +2002,21 @@ class GetModelVersionResponse:
     def as_dict(self) -> dict:
         """Serializes the GetModelVersionResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version.as_dict()
+        if self.model_version:
+            body["model_version"] = self.model_version.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version
+        if self.model_version:
+            body["model_version"] = self.model_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionResponse:
         """Deserializes the GetModelVersionResponse from a dictionary."""
-        return cls(model_version=_from_dict(d, 'model_version', ModelVersion))
+        return cls(model_version=_from_dict(d, "model_version", ModelVersion))
 
 
 @dataclass
@@ -1766,20 +2027,21 @@ class GetRegisteredModelPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetRegisteredModelPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRegisteredModelPermissionLevelsResponse:
         """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary."""
-        return cls(
-            permission_levels=_repeated_dict(d, 'permission_levels', RegisteredModelPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", RegisteredModelPermissionsDescription))
 
 
 @dataclass
@@ -1790,19 +2052,21 @@ class GetRunResponse:
     def as_dict(self) -> dict:
         """Serializes the GetRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run: body['run'] = self.run.as_dict()
+        if self.run:
+            body["run"] = self.run.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run: body['run'] = self.run
+        if self.run:
+            body["run"] = self.run
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRunResponse:
         """Deserializes the GetRunResponse from a dictionary."""
-        return cls(run=_from_dict(d, 'run', Run))
+        return cls(run=_from_dict(d, "run", Run))
 
 
 @dataclass
@@ -1829,30 +2093,38 @@ class HttpUrlSpec:
     def as_dict(self) -> dict:
         """Serializes the HttpUrlSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authorization is not None: body['authorization'] = self.authorization
+        if self.authorization is not None:
+            body["authorization"] = self.authorization
         if self.enable_ssl_verification is not None:
-            body['enable_ssl_verification'] = self.enable_ssl_verification
-        if self.secret is not None: body['secret'] = self.secret
-        if self.url is not None: body['url'] = self.url
+            body["enable_ssl_verification"] = self.enable_ssl_verification
+        if self.secret is not None:
+            body["secret"] = self.secret
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authorization is not None: body['authorization'] = self.authorization
+        if self.authorization is not None:
+            body["authorization"] = self.authorization
         if self.enable_ssl_verification is not None:
-            body['enable_ssl_verification'] = self.enable_ssl_verification
-        if self.secret is not None: body['secret'] = self.secret
-        if self.url is not None: body['url'] = self.url
+            body["enable_ssl_verification"] = self.enable_ssl_verification
+        if self.secret is not None:
+            body["secret"] = self.secret
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpec:
         """Deserializes the HttpUrlSpec from a dictionary."""
-        return cls(authorization=d.get('authorization', None),
-                   enable_ssl_verification=d.get('enable_ssl_verification', None),
-                   secret=d.get('secret', None),
-                   url=d.get('url', None))
+        return cls(
+            authorization=d.get("authorization", None),
+            enable_ssl_verification=d.get("enable_ssl_verification", None),
+            secret=d.get("secret", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -1871,22 +2143,27 @@ def as_dict(self) -> dict:
         """Serializes the HttpUrlSpecWithoutSecret into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.enable_ssl_verification is not None:
-            body['enable_ssl_verification'] = self.enable_ssl_verification
-        if self.url is not None: body['url'] = self.url
+            body["enable_ssl_verification"] = self.enable_ssl_verification
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.enable_ssl_verification is not None:
-            body['enable_ssl_verification'] = self.enable_ssl_verification
-        if self.url is not None: body['url'] = self.url
+            body["enable_ssl_verification"] = self.enable_ssl_verification
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpecWithoutSecret:
         """Deserializes the HttpUrlSpecWithoutSecret from a dictionary."""
-        return cls(enable_ssl_verification=d.get('enable_ssl_verification', None), url=d.get('url', None))
+        return cls(
+            enable_ssl_verification=d.get("enable_ssl_verification", None),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -1900,21 +2177,25 @@ class InputTag:
     def as_dict(self) -> dict:
         """Serializes the InputTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the InputTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InputTag:
         """Deserializes the InputTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -1932,25 +2213,33 @@ class JobSpec:
     def as_dict(self) -> dict:
         """Serializes the JobSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_token is not None: body['access_token'] = self.access_token
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        if self.access_token is not None:
+            body["access_token"] = self.access_token
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.workspace_url is not None:
+            body["workspace_url"] = self.workspace_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_token is not None: body['access_token'] = self.access_token
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        if self.access_token is not None:
+            body["access_token"] = self.access_token
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.workspace_url is not None:
+            body["workspace_url"] = self.workspace_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpec:
         """Deserializes the JobSpec from a dictionary."""
-        return cls(access_token=d.get('access_token', None),
-                   job_id=d.get('job_id', None),
-                   workspace_url=d.get('workspace_url', None))
+        return cls(
+            access_token=d.get("access_token", None),
+            job_id=d.get("job_id", None),
+            workspace_url=d.get("workspace_url", None),
+        )
 
 
 @dataclass
@@ -1966,21 +2255,28 @@ class JobSpecWithoutSecret:
     def as_dict(self) -> dict:
         """Serializes the JobSpecWithoutSecret into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.workspace_url is not None:
+            body["workspace_url"] = self.workspace_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.workspace_url is not None:
+            body["workspace_url"] = self.workspace_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpecWithoutSecret:
         """Deserializes the JobSpecWithoutSecret from a dictionary."""
-        return cls(job_id=d.get('job_id', None), workspace_url=d.get('workspace_url', None))
+        return cls(
+            job_id=d.get("job_id", None),
+            workspace_url=d.get("workspace_url", None),
+        )
 
 
 @dataclass
@@ -1997,25 +2293,33 @@ class ListArtifactsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListArtifactsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.files: body['files'] = [v.as_dict() for v in self.files]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.root_uri is not None: body['root_uri'] = self.root_uri
+        if self.files:
+            body["files"] = [v.as_dict() for v in self.files]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.root_uri is not None:
+            body["root_uri"] = self.root_uri
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.files: body['files'] = self.files
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.root_uri is not None: body['root_uri'] = self.root_uri
+        if self.files:
+            body["files"] = self.files
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.root_uri is not None:
+            body["root_uri"] = self.root_uri
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListArtifactsResponse:
         """Deserializes the ListArtifactsResponse from a dictionary."""
-        return cls(files=_repeated_dict(d, 'files', FileInfo),
-                   next_page_token=d.get('next_page_token', None),
-                   root_uri=d.get('root_uri', None))
+        return cls(
+            files=_repeated_dict(d, "files", FileInfo),
+            next_page_token=d.get("next_page_token", None),
+            root_uri=d.get("root_uri", None),
+        )
 
 
 @dataclass
@@ -2030,22 +2334,28 @@ class ListExperimentsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListExperimentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.experiments:
+            body["experiments"] = [v.as_dict() for v in self.experiments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiments: body['experiments'] = self.experiments
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.experiments:
+            body["experiments"] = self.experiments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExperimentsResponse:
         """Deserializes the ListExperimentsResponse from a dictionary."""
-        return cls(experiments=_repeated_dict(d, 'experiments', Experiment),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            experiments=_repeated_dict(d, "experiments", Experiment),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -2058,22 +2368,28 @@ class ListModelsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListModelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = [v.as_dict() for v in self.registered_models]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = self.registered_models
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = self.registered_models
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelsResponse:
         """Deserializes the ListModelsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   registered_models=_repeated_dict(d, 'registered_models', Model))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            registered_models=_repeated_dict(d, "registered_models", Model),
+        )
 
 
 @dataclass
@@ -2087,22 +2403,28 @@ class ListRegistryWebhooks:
     def as_dict(self) -> dict:
         """Serializes the ListRegistryWebhooks into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.webhooks: body['webhooks'] = [v.as_dict() for v in self.webhooks]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.webhooks:
+            body["webhooks"] = [v.as_dict() for v in self.webhooks]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.webhooks: body['webhooks'] = self.webhooks
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.webhooks:
+            body["webhooks"] = self.webhooks
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegistryWebhooks:
         """Deserializes the ListRegistryWebhooks from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   webhooks=_repeated_dict(d, 'webhooks', RegistryWebhook))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            webhooks=_repeated_dict(d, "webhooks", RegistryWebhook),
+        )
 
 
 @dataclass
@@ -2113,19 +2435,21 @@ class ListTransitionRequestsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListTransitionRequestsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.requests: body['requests'] = [v.as_dict() for v in self.requests]
+        if self.requests:
+            body["requests"] = [v.as_dict() for v in self.requests]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.requests: body['requests'] = self.requests
+        if self.requests:
+            body["requests"] = self.requests
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTransitionRequestsResponse:
         """Deserializes the ListTransitionRequestsResponse from a dictionary."""
-        return cls(requests=_repeated_dict(d, 'requests', Activity))
+        return cls(requests=_repeated_dict(d, "requests", Activity))
 
 
 @dataclass
@@ -2148,28 +2472,38 @@ class LogBatch:
     def as_dict(self) -> dict:
         """Serializes the LogBatch into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics]
-        if self.params: body['params'] = [v.as_dict() for v in self.params]
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.metrics:
+            body["metrics"] = [v.as_dict() for v in self.metrics]
+        if self.params:
+            body["params"] = [v.as_dict() for v in self.params]
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogBatch into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metrics: body['metrics'] = self.metrics
-        if self.params: body['params'] = self.params
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.tags: body['tags'] = self.tags
+        if self.metrics:
+            body["metrics"] = self.metrics
+        if self.params:
+            body["params"] = self.params
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogBatch:
         """Deserializes the LogBatch from a dictionary."""
-        return cls(metrics=_repeated_dict(d, 'metrics', Metric),
-                   params=_repeated_dict(d, 'params', Param),
-                   run_id=d.get('run_id', None),
-                   tags=_repeated_dict(d, 'tags', RunTag))
+        return cls(
+            metrics=_repeated_dict(d, "metrics", Metric),
+            params=_repeated_dict(d, "params", Param),
+            run_id=d.get("run_id", None),
+            tags=_repeated_dict(d, "tags", RunTag),
+        )
 
 
 @dataclass
@@ -2202,21 +2536,28 @@ class LogInputs:
     def as_dict(self) -> dict:
         """Serializes the LogInputs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets]
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.datasets:
+            body["datasets"] = [v.as_dict() for v in self.datasets]
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogInputs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.datasets: body['datasets'] = self.datasets
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.datasets:
+            body["datasets"] = self.datasets
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogInputs:
         """Deserializes the LogInputs from a dictionary."""
-        return cls(datasets=_repeated_dict(d, 'datasets', DatasetInput), run_id=d.get('run_id', None))
+        return cls(
+            datasets=_repeated_dict(d, "datasets", DatasetInput),
+            run_id=d.get("run_id", None),
+        )
 
 
 @dataclass
@@ -2262,34 +2603,48 @@ class LogMetric:
     def as_dict(self) -> dict:
         """Serializes the LogMetric into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.step is not None: body['step'] = self.step
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.step is not None:
+            body["step"] = self.step
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogMetric into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.step is not None: body['step'] = self.step
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.step is not None:
+            body["step"] = self.step
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogMetric:
         """Deserializes the LogMetric from a dictionary."""
-        return cls(key=d.get('key', None),
-                   run_id=d.get('run_id', None),
-                   run_uuid=d.get('run_uuid', None),
-                   step=d.get('step', None),
-                   timestamp=d.get('timestamp', None),
-                   value=d.get('value', None))
+        return cls(
+            key=d.get("key", None),
+            run_id=d.get("run_id", None),
+            run_uuid=d.get("run_uuid", None),
+            step=d.get("step", None),
+            timestamp=d.get("timestamp", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -2322,21 +2677,25 @@ class LogModel:
     def as_dict(self) -> dict:
         """Serializes the LogModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_json is not None: body['model_json'] = self.model_json
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.model_json is not None:
+            body["model_json"] = self.model_json
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_json is not None: body['model_json'] = self.model_json
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.model_json is not None:
+            body["model_json"] = self.model_json
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogModel:
         """Deserializes the LogModel from a dictionary."""
-        return cls(model_json=d.get('model_json', None), run_id=d.get('run_id', None))
+        return cls(model_json=d.get("model_json", None), run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -2376,28 +2735,38 @@ class LogParam:
     def as_dict(self) -> dict:
         """Serializes the LogParam into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LogParam into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogParam:
         """Deserializes the LogParam from a dictionary."""
-        return cls(key=d.get('key', None),
-                   run_id=d.get('run_id', None),
-                   run_uuid=d.get('run_uuid', None),
-                   value=d.get('value', None))
+        return cls(
+            key=d.get("key", None),
+            run_id=d.get("run_id", None),
+            run_uuid=d.get("run_uuid", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -2436,28 +2805,38 @@ class Metric:
     def as_dict(self) -> dict:
         """Serializes the Metric into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.step is not None: body['step'] = self.step
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.step is not None:
+            body["step"] = self.step
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Metric into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.step is not None: body['step'] = self.step
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.step is not None:
+            body["step"] = self.step
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Metric:
         """Deserializes the Metric from a dictionary."""
-        return cls(key=d.get('key', None),
-                   step=d.get('step', None),
-                   timestamp=d.get('timestamp', None),
-                   value=d.get('value', None))
+        return cls(
+            key=d.get("key", None),
+            step=d.get("step", None),
+            timestamp=d.get("timestamp", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -2487,39 +2866,53 @@ class Model:
     def as_dict(self) -> dict:
         """Serializes the Model into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions]
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.latest_versions:
+            body["latest_versions"] = [v.as_dict() for v in self.latest_versions]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Model into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.latest_versions: body['latest_versions'] = self.latest_versions
-        if self.name is not None: body['name'] = self.name
-        if self.tags: body['tags'] = self.tags
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.latest_versions:
+            body["latest_versions"] = self.latest_versions
+        if self.name is not None:
+            body["name"] = self.name
+        if self.tags:
+            body["tags"] = self.tags
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Model:
         """Deserializes the Model from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   description=d.get('description', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion),
-                   name=d.get('name', None),
-                   tags=_repeated_dict(d, 'tags', ModelTag),
-                   user_id=d.get('user_id', None))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            description=d.get("description", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            latest_versions=_repeated_dict(d, "latest_versions", ModelVersion),
+            name=d.get("name", None),
+            tags=_repeated_dict(d, "tags", ModelTag),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -2555,45 +2948,63 @@ class ModelDatabricks:
     def as_dict(self) -> dict:
         """Serializes the ModelDatabricks into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions]
-        if self.name is not None: body['name'] = self.name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.latest_versions:
+            body["latest_versions"] = [v.as_dict() for v in self.latest_versions]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.latest_versions: body['latest_versions'] = self.latest_versions
-        if self.name is not None: body['name'] = self.name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.tags: body['tags'] = self.tags
-        if self.user_id is not None: body['user_id'] = self.user_id
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.latest_versions:
+            body["latest_versions"] = self.latest_versions
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.tags:
+            body["tags"] = self.tags
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDatabricks:
         """Deserializes the ModelDatabricks from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion),
-                   name=d.get('name', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel),
-                   tags=_repeated_dict(d, 'tags', ModelTag),
-                   user_id=d.get('user_id', None))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            latest_versions=_repeated_dict(d, "latest_versions", ModelVersion),
+            name=d.get("name", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+            tags=_repeated_dict(d, "tags", ModelTag),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -2607,21 +3018,25 @@ class ModelTag:
     def as_dict(self) -> dict:
         """Serializes the ModelTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelTag:
         """Deserializes the ModelTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -2669,57 +3084,83 @@ class ModelVersion:
     def as_dict(self) -> dict:
         """Serializes the ModelVersion into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.current_stage is not None: body['current_stage'] = self.current_stage
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.current_stage is not None:
+            body["current_stage"] = self.current_stage
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.version is not None: body['version'] = self.version
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelVersion into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.current_stage is not None: body['current_stage'] = self.current_stage
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.current_stage is not None:
+            body["current_stage"] = self.current_stage
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.tags: body['tags'] = self.tags
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.version is not None: body['version'] = self.version
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.tags:
+            body["tags"] = self.tags
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersion:
         """Deserializes the ModelVersion from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   current_stage=d.get('current_stage', None),
-                   description=d.get('description', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   name=d.get('name', None),
-                   run_id=d.get('run_id', None),
-                   run_link=d.get('run_link', None),
-                   source=d.get('source', None),
-                   status=_enum(d, 'status', ModelVersionStatus),
-                   status_message=d.get('status_message', None),
-                   tags=_repeated_dict(d, 'tags', ModelVersionTag),
-                   user_id=d.get('user_id', None),
-                   version=d.get('version', None))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            current_stage=d.get("current_stage", None),
+            description=d.get("description", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            name=d.get("name", None),
+            run_id=d.get("run_id", None),
+            run_link=d.get("run_link", None),
+            source=d.get("source", None),
+            status=_enum(d, "status", ModelVersionStatus),
+            status_message=d.get("status_message", None),
+            tags=_repeated_dict(d, "tags", ModelVersionTag),
+            user_id=d.get("user_id", None),
+            version=d.get("version", None),
+        )
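
Illustration (a sketch, not part of this patch) of the behavior the reformatting above preserves: as_dict serializes enum fields through .value and omits None fields, while from_dict re-parses the enum, so a ModelVersion round-trips cleanly. The import path databricks.sdk.service.ml is an assumption here.

    # Hypothetical round-trip check; module path and field defaults are assumptions.
    from databricks.sdk.service.ml import ModelVersion, ModelVersionStatus

    mv = ModelVersion(name="demo-model", version="1", status=ModelVersionStatus.READY)

    body = mv.as_dict()
    assert body["status"] == "READY"           # enum serialized via .value
    assert "description" not in body           # None fields are omitted

    assert ModelVersion.from_dict(body) == mv  # from_dict re-parses the enum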
 
 
 @dataclass
@@ -2786,68 +3227,96 @@ class ModelVersionDatabricks:
     def as_dict(self) -> dict:
         """Serializes the ModelVersionDatabricks into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.current_stage is not None: body['current_stage'] = self.current_stage.value
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.current_stage is not None:
+            body["current_stage"] = self.current_stage.value
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.version is not None: body['version'] = self.version
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.current_stage is not None: body['current_stage'] = self.current_stage
-        if self.description is not None: body['description'] = self.description
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.current_stage is not None:
+            body["current_stage"] = self.current_stage
+        if self.description is not None:
+            body["description"] = self.description
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_link is not None: body['run_link'] = self.run_link
-        if self.source is not None: body['source'] = self.source
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.tags: body['tags'] = self.tags
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.version is not None: body['version'] = self.version
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_link is not None:
+            body["run_link"] = self.run_link
+        if self.source is not None:
+            body["source"] = self.source
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.tags:
+            body["tags"] = self.tags
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionDatabricks:
         """Deserializes the ModelVersionDatabricks from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   current_stage=_enum(d, 'current_stage', Stage),
-                   description=d.get('description', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   name=d.get('name', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel),
-                   run_id=d.get('run_id', None),
-                   run_link=d.get('run_link', None),
-                   source=d.get('source', None),
-                   status=_enum(d, 'status', Status),
-                   status_message=d.get('status_message', None),
-                   tags=_repeated_dict(d, 'tags', ModelVersionTag),
-                   user_id=d.get('user_id', None),
-                   version=d.get('version', None))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            current_stage=_enum(d, "current_stage", Stage),
+            description=d.get("description", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            name=d.get("name", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+            run_id=d.get("run_id", None),
+            run_link=d.get("run_link", None),
+            source=d.get("source", None),
+            status=_enum(d, "status", Status),
+            status_message=d.get("status_message", None),
+            tags=_repeated_dict(d, "tags", ModelVersionTag),
+            user_id=d.get("user_id", None),
+            version=d.get("version", None),
+        )
 
 
 class ModelVersionStatus(Enum):
     """Current status of `model_version`"""
 
-    FAILED_REGISTRATION = 'FAILED_REGISTRATION'
-    PENDING_REGISTRATION = 'PENDING_REGISTRATION'
-    READY = 'READY'
+    FAILED_REGISTRATION = "FAILED_REGISTRATION"
+    PENDING_REGISTRATION = "PENDING_REGISTRATION"
+    READY = "READY"
 
 
 @dataclass
@@ -2861,21 +3330,25 @@ class ModelVersionTag:
     def as_dict(self) -> dict:
         """Serializes the ModelVersionTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelVersionTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionTag:
         """Deserializes the ModelVersionTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -2889,32 +3362,36 @@ class Param:
     def as_dict(self) -> dict:
         """Serializes the Param into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Param into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Param:
         """Deserializes the Param from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 class PermissionLevel(Enum):
     """Permission level of the requesting user on the object. For what is allowed at each level, see
     [MLflow Model permissions](..)."""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
-    CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
-    CAN_READ = 'CAN_READ'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS"
+    CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS"
+    CAN_READ = "CAN_READ"
 
 
 @dataclass
@@ -2934,30 +3411,38 @@ class RegisteredModelAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlRequest:
         """Deserializes the RegisteredModelAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -2980,33 +3465,43 @@ class RegisteredModelAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlResponse:
         """Deserializes the RegisteredModelAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', RegisteredModelPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", RegisteredModelPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -3021,35 +3516,43 @@ class RegisteredModelPermission:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermission:
         """Deserializes the RegisteredModelPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel),
+        )
 
 
 class RegisteredModelPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
-    CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
-    CAN_READ = 'CAN_READ'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS"
+    CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS"
+    CAN_READ = "CAN_READ"
 
 
 @dataclass
@@ -3064,26 +3567,32 @@ def as_dict(self) -> dict:
         """Serializes the RegisteredModelPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissions:
         """Deserializes the RegisteredModelPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      RegisteredModelAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -3096,22 +3605,28 @@ class RegisteredModelPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsDescription:
         """Deserializes the RegisteredModelPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel),
+        )
 
 
 @dataclass
@@ -3125,23 +3640,27 @@ def as_dict(self) -> dict:
         """Serializes the RegisteredModelPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.registered_model_id is not None:
+            body["registered_model_id"] = self.registered_model_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.registered_model_id is not None:
+            body["registered_model_id"] = self.registered_model_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsRequest:
         """Deserializes the RegisteredModelPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      RegisteredModelAccessControlRequest),
-                   registered_model_id=d.get('registered_model_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlRequest),
+            registered_model_id=d.get("registered_model_id", None),
+        )
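
Illustration (not part of the patch) of the deep vs. shallow serialization exposed by the permission classes above: as_dict recurses into nested objects, while as_shallow_dict keeps them as-is. The import path and the example ids are assumptions.

    # Illustrative only; import path assumed, ids hypothetical.
    from databricks.sdk.service.ml import (
        RegisteredModelAccessControlRequest,
        RegisteredModelPermissionLevel,
        RegisteredModelPermissionsRequest,
    )

    req = RegisteredModelPermissionsRequest(
        registered_model_id="d-123",  # hypothetical id
        access_control_list=[
            RegisteredModelAccessControlRequest(
                group_name="ml-engineers",
                permission_level=RegisteredModelPermissionLevel.CAN_MANAGE_STAGING_VERSIONS,
            )
        ],
    )

    deep = req.as_dict()
    shallow = req.as_shallow_dict()
    assert isinstance(deep["access_control_list"][0], dict)  # nested as_dict()
    assert isinstance(shallow["access_control_list"][0], RegisteredModelAccessControlRequest)  # kept as object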
 
 
 @dataclass
@@ -3207,75 +3726,93 @@ class RegistryWebhook:
     def as_dict(self) -> dict:
         """Serializes the RegistryWebhook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = [v.value for v in self.events]
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.job_spec: body['job_spec'] = self.job_spec.as_dict()
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = [v.value for v in self.events]
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.job_spec:
+            body["job_spec"] = self.job_spec.as_dict()
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.status is not None: body['status'] = self.status.value
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = self.events
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
-        if self.id is not None: body['id'] = self.id
-        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = self.events
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec
+        if self.id is not None:
+            body["id"] = self.id
+        if self.job_spec:
+            body["job_spec"] = self.job_spec
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.status is not None: body['status'] = self.status
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegistryWebhook:
         """Deserializes the RegistryWebhook from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   description=d.get('description', None),
-                   events=_repeated_enum(d, 'events', RegistryWebhookEvent),
-                   http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpecWithoutSecret),
-                   id=d.get('id', None),
-                   job_spec=_from_dict(d, 'job_spec', JobSpecWithoutSecret),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   model_name=d.get('model_name', None),
-                   status=_enum(d, 'status', RegistryWebhookStatus))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            description=d.get("description", None),
+            events=_repeated_enum(d, "events", RegistryWebhookEvent),
+            http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpecWithoutSecret),
+            id=d.get("id", None),
+            job_spec=_from_dict(d, "job_spec", JobSpecWithoutSecret),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            model_name=d.get("model_name", None),
+            status=_enum(d, "status", RegistryWebhookStatus),
+        )
 
 
 class RegistryWebhookEvent(Enum):
 
-    COMMENT_CREATED = 'COMMENT_CREATED'
-    MODEL_VERSION_CREATED = 'MODEL_VERSION_CREATED'
-    MODEL_VERSION_TAG_SET = 'MODEL_VERSION_TAG_SET'
-    MODEL_VERSION_TRANSITIONED_STAGE = 'MODEL_VERSION_TRANSITIONED_STAGE'
-    MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = 'MODEL_VERSION_TRANSITIONED_TO_ARCHIVED'
-    MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = 'MODEL_VERSION_TRANSITIONED_TO_PRODUCTION'
-    MODEL_VERSION_TRANSITIONED_TO_STAGING = 'MODEL_VERSION_TRANSITIONED_TO_STAGING'
-    REGISTERED_MODEL_CREATED = 'REGISTERED_MODEL_CREATED'
-    TRANSITION_REQUEST_CREATED = 'TRANSITION_REQUEST_CREATED'
-    TRANSITION_REQUEST_TO_ARCHIVED_CREATED = 'TRANSITION_REQUEST_TO_ARCHIVED_CREATED'
-    TRANSITION_REQUEST_TO_PRODUCTION_CREATED = 'TRANSITION_REQUEST_TO_PRODUCTION_CREATED'
-    TRANSITION_REQUEST_TO_STAGING_CREATED = 'TRANSITION_REQUEST_TO_STAGING_CREATED'
+    COMMENT_CREATED = "COMMENT_CREATED"
+    MODEL_VERSION_CREATED = "MODEL_VERSION_CREATED"
+    MODEL_VERSION_TAG_SET = "MODEL_VERSION_TAG_SET"
+    MODEL_VERSION_TRANSITIONED_STAGE = "MODEL_VERSION_TRANSITIONED_STAGE"
+    MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = "MODEL_VERSION_TRANSITIONED_TO_ARCHIVED"
+    MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = "MODEL_VERSION_TRANSITIONED_TO_PRODUCTION"
+    MODEL_VERSION_TRANSITIONED_TO_STAGING = "MODEL_VERSION_TRANSITIONED_TO_STAGING"
+    REGISTERED_MODEL_CREATED = "REGISTERED_MODEL_CREATED"
+    TRANSITION_REQUEST_CREATED = "TRANSITION_REQUEST_CREATED"
+    TRANSITION_REQUEST_TO_ARCHIVED_CREATED = "TRANSITION_REQUEST_TO_ARCHIVED_CREATED"
+    TRANSITION_REQUEST_TO_PRODUCTION_CREATED = "TRANSITION_REQUEST_TO_PRODUCTION_CREATED"
+    TRANSITION_REQUEST_TO_STAGING_CREATED = "TRANSITION_REQUEST_TO_STAGING_CREATED"
 
 
 class RegistryWebhookStatus(Enum):
     """Enable or disable triggering the webhook, or put the webhook into test mode. The default is
     `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-    
+
     * `DISABLED`: Webhook is not triggered.
-    
+
     * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a
     real event."""
 
-    ACTIVE = 'ACTIVE'
-    DISABLED = 'DISABLED'
-    TEST_MODE = 'TEST_MODE'
+    ACTIVE = "ACTIVE"
+    DISABLED = "DISABLED"
+    TEST_MODE = "TEST_MODE"
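
A deserialization sketch for the webhook types above (illustrative, not part of the patch): from_dict maps plain strings onto the enums, both for the repeated events field and for status. Import path and field values are assumptions.

    # Illustrative only; import path and values are assumptions.
    from databricks.sdk.service.ml import (
        RegistryWebhook,
        RegistryWebhookEvent,
        RegistryWebhookStatus,
    )

    hook = RegistryWebhook.from_dict(
        {
            "id": "wh-1",  # hypothetical webhook id
            "model_name": "demo-model",
            "events": ["MODEL_VERSION_CREATED", "TRANSITION_REQUEST_CREATED"],
            "status": "ACTIVE",
        }
    )

    assert hook.events == [
        RegistryWebhookEvent.MODEL_VERSION_CREATED,
        RegistryWebhookEvent.TRANSITION_REQUEST_CREATED,
    ]
    assert hook.status is RegistryWebhookStatus.ACTIVE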
 
 
 @dataclass
@@ -3303,28 +3840,38 @@ class RejectTransitionRequest:
     def as_dict(self) -> dict:
         """Serializes the RejectTransitionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage.value
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage
-        if self.version is not None: body['version'] = self.version
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequest:
         """Deserializes the RejectTransitionRequest from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   stage=_enum(d, 'stage', Stage),
-                   version=d.get('version', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            stage=_enum(d, "stage", Stage),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -3335,19 +3882,21 @@ class RejectTransitionRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the RejectTransitionRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activity: body['activity'] = self.activity.as_dict()
+        if self.activity:
+            body["activity"] = self.activity.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activity: body['activity'] = self.activity
+        if self.activity:
+            body["activity"] = self.activity
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequestResponse:
         """Deserializes the RejectTransitionRequestResponse from a dictionary."""
-        return cls(activity=_from_dict(d, 'activity', Activity))
+        return cls(activity=_from_dict(d, "activity", Activity))
 
 
 @dataclass
@@ -3361,21 +3910,25 @@ class RenameModelRequest:
     def as_dict(self) -> dict:
         """Serializes the RenameModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelRequest:
         """Deserializes the RenameModelRequest from a dictionary."""
-        return cls(name=d.get('name', None), new_name=d.get('new_name', None))
+        return cls(name=d.get("name", None), new_name=d.get("new_name", None))
 
 
 @dataclass
@@ -3385,19 +3938,21 @@ class RenameModelResponse:
     def as_dict(self) -> dict:
         """Serializes the RenameModelResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
+        if self.registered_model:
+            body["registered_model"] = self.registered_model.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.registered_model: body['registered_model'] = self.registered_model
+        if self.registered_model:
+            body["registered_model"] = self.registered_model
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelResponse:
         """Deserializes the RenameModelResponse from a dictionary."""
-        return cls(registered_model=_from_dict(d, 'registered_model', Model))
+        return cls(registered_model=_from_dict(d, "registered_model", Model))
 
 
 @dataclass
@@ -3408,19 +3963,21 @@ class RestoreExperiment:
     def as_dict(self) -> dict:
         """Serializes the RestoreExperiment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreExperiment:
         """Deserializes the RestoreExperiment from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None))
+        return cls(experiment_id=d.get("experiment_id", None))
 
 
 @dataclass
@@ -3450,19 +4007,21 @@ class RestoreRun:
     def as_dict(self) -> dict:
         """Serializes the RestoreRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestoreRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRun:
         """Deserializes the RestoreRun from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
+        return cls(run_id=d.get("run_id", None))
 
 
 @dataclass
@@ -3500,25 +4059,33 @@ class RestoreRuns:
     def as_dict(self) -> dict:
         """Serializes the RestoreRuns into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.max_runs is not None: body['max_runs'] = self.max_runs
-        if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.max_runs is not None:
+            body["max_runs"] = self.max_runs
+        if self.min_timestamp_millis is not None:
+            body["min_timestamp_millis"] = self.min_timestamp_millis
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.max_runs is not None: body['max_runs'] = self.max_runs
-        if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.max_runs is not None:
+            body["max_runs"] = self.max_runs
+        if self.min_timestamp_millis is not None:
+            body["min_timestamp_millis"] = self.min_timestamp_millis
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRuns:
         """Deserializes the RestoreRuns from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None),
-                   max_runs=d.get('max_runs', None),
-                   min_timestamp_millis=d.get('min_timestamp_millis', None))
+        return cls(
+            experiment_id=d.get("experiment_id", None),
+            max_runs=d.get("max_runs", None),
+            min_timestamp_millis=d.get("min_timestamp_millis", None),
+        )
 
 
 @dataclass
@@ -3529,19 +4096,21 @@ class RestoreRunsResponse:
     def as_dict(self) -> dict:
         """Serializes the RestoreRunsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
+        if self.runs_restored is not None:
+            body["runs_restored"] = self.runs_restored
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
+        if self.runs_restored is not None:
+            body["runs_restored"] = self.runs_restored
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRunsResponse:
         """Deserializes the RestoreRunsResponse from a dictionary."""
-        return cls(runs_restored=d.get('runs_restored', None))
+        return cls(runs_restored=d.get("runs_restored", None))
 
 
 @dataclass
@@ -3558,25 +4127,33 @@ class Run:
     def as_dict(self) -> dict:
         """Serializes the Run into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data: body['data'] = self.data.as_dict()
-        if self.info: body['info'] = self.info.as_dict()
-        if self.inputs: body['inputs'] = self.inputs.as_dict()
+        if self.data:
+            body["data"] = self.data.as_dict()
+        if self.info:
+            body["info"] = self.info.as_dict()
+        if self.inputs:
+            body["inputs"] = self.inputs.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Run into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data: body['data'] = self.data
-        if self.info: body['info'] = self.info
-        if self.inputs: body['inputs'] = self.inputs
+        if self.data:
+            body["data"] = self.data
+        if self.info:
+            body["info"] = self.info
+        if self.inputs:
+            body["inputs"] = self.inputs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
-        return cls(data=_from_dict(d, 'data', RunData),
-                   info=_from_dict(d, 'info', RunInfo),
-                   inputs=_from_dict(d, 'inputs', RunInputs))
+        return cls(
+            data=_from_dict(d, "data", RunData),
+            info=_from_dict(d, "info", RunInfo),
+            inputs=_from_dict(d, "inputs", RunInputs),
+        )
 
 
 @dataclass
@@ -3593,25 +4170,33 @@ class RunData:
     def as_dict(self) -> dict:
         """Serializes the RunData into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics]
-        if self.params: body['params'] = [v.as_dict() for v in self.params]
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.metrics:
+            body["metrics"] = [v.as_dict() for v in self.metrics]
+        if self.params:
+            body["params"] = [v.as_dict() for v in self.params]
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunData into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metrics: body['metrics'] = self.metrics
-        if self.params: body['params'] = self.params
-        if self.tags: body['tags'] = self.tags
+        if self.metrics:
+            body["metrics"] = self.metrics
+        if self.params:
+            body["params"] = self.params
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunData:
         """Deserializes the RunData from a dictionary."""
-        return cls(metrics=_repeated_dict(d, 'metrics', Metric),
-                   params=_repeated_dict(d, 'params', Param),
-                   tags=_repeated_dict(d, 'tags', RunTag))
+        return cls(
+            metrics=_repeated_dict(d, "metrics", Metric),
+            params=_repeated_dict(d, "params", Param),
+            tags=_repeated_dict(d, "tags", RunTag),
+        )
 
 
 @dataclass
@@ -3650,53 +4235,73 @@ class RunInfo:
     def as_dict(self) -> dict:
         """Serializes the RunInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.status is not None: body['status'] = self.status.value
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.artifact_uri is not None:
+            body["artifact_uri"] = self.artifact_uri
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.lifecycle_stage is not None:
+            body["lifecycle_stage"] = self.lifecycle_stage
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.status is not None: body['status'] = self.status
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.artifact_uri is not None:
+            body["artifact_uri"] = self.artifact_uri
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.lifecycle_stage is not None:
+            body["lifecycle_stage"] = self.lifecycle_stage
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.status is not None:
+            body["status"] = self.status
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInfo:
         """Deserializes the RunInfo from a dictionary."""
-        return cls(artifact_uri=d.get('artifact_uri', None),
-                   end_time=d.get('end_time', None),
-                   experiment_id=d.get('experiment_id', None),
-                   lifecycle_stage=d.get('lifecycle_stage', None),
-                   run_id=d.get('run_id', None),
-                   run_uuid=d.get('run_uuid', None),
-                   start_time=d.get('start_time', None),
-                   status=_enum(d, 'status', RunInfoStatus),
-                   user_id=d.get('user_id', None))
+        return cls(
+            artifact_uri=d.get("artifact_uri", None),
+            end_time=d.get("end_time", None),
+            experiment_id=d.get("experiment_id", None),
+            lifecycle_stage=d.get("lifecycle_stage", None),
+            run_id=d.get("run_id", None),
+            run_uuid=d.get("run_uuid", None),
+            start_time=d.get("start_time", None),
+            status=_enum(d, "status", RunInfoStatus),
+            user_id=d.get("user_id", None),
+        )
 
 
 class RunInfoStatus(Enum):
     """Current status of the run."""
 
-    FAILED = 'FAILED'
-    FINISHED = 'FINISHED'
-    KILLED = 'KILLED'
-    RUNNING = 'RUNNING'
-    SCHEDULED = 'SCHEDULED'
+    FAILED = "FAILED"
+    FINISHED = "FINISHED"
+    KILLED = "KILLED"
+    RUNNING = "RUNNING"
+    SCHEDULED = "SCHEDULED"
 
 
 @dataclass
@@ -3707,19 +4312,21 @@ class RunInputs:
     def as_dict(self) -> dict:
         """Serializes the RunInputs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs]
+        if self.dataset_inputs:
+            body["dataset_inputs"] = [v.as_dict() for v in self.dataset_inputs]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunInputs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs
+        if self.dataset_inputs:
+            body["dataset_inputs"] = self.dataset_inputs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInputs:
         """Deserializes the RunInputs from a dictionary."""
-        return cls(dataset_inputs=_repeated_dict(d, 'dataset_inputs', DatasetInput))
+        return cls(dataset_inputs=_repeated_dict(d, "dataset_inputs", DatasetInput))
 
 
 @dataclass
@@ -3733,21 +4340,25 @@ class RunTag:
     def as_dict(self) -> dict:
         """Serializes the RunTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTag:
         """Deserializes the RunTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
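# A minimal sketch of the serialization contract shared by these generated
# dataclasses: as_dict() produces a JSON-ready request body and from_dict()
# parses one back. RunTag is used only because it is the smallest case here;
# the key/value pair is an arbitrary placeholder.
tag = RunTag(key="team", value="ml-platform")
body = tag.as_dict()  # {'key': 'team', 'value': 'ml-platform'}
assert RunTag.from_dict(body) == tag  # dataclass equality makes the round trip checkable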
 
 
 @dataclass
@@ -3773,31 +4384,43 @@ class SearchExperiments:
     def as_dict(self) -> dict:
         """Serializes the SearchExperiments into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter is not None: body['filter'] = self.filter
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.order_by: body['order_by'] = [v for v in self.order_by]
-        if self.page_token is not None: body['page_token'] = self.page_token
-        if self.view_type is not None: body['view_type'] = self.view_type.value
+        if self.filter is not None:
+            body["filter"] = self.filter
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.order_by:
+            body["order_by"] = [v for v in self.order_by]
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
+        if self.view_type is not None:
+            body["view_type"] = self.view_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter is not None: body['filter'] = self.filter
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.order_by: body['order_by'] = self.order_by
-        if self.page_token is not None: body['page_token'] = self.page_token
-        if self.view_type is not None: body['view_type'] = self.view_type
+        if self.filter is not None:
+            body["filter"] = self.filter
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.order_by:
+            body["order_by"] = self.order_by
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
+        if self.view_type is not None:
+            body["view_type"] = self.view_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperiments:
         """Deserializes the SearchExperiments from a dictionary."""
-        return cls(filter=d.get('filter', None),
-                   max_results=d.get('max_results', None),
-                   order_by=d.get('order_by', None),
-                   page_token=d.get('page_token', None),
-                   view_type=_enum(d, 'view_type', SearchExperimentsViewType))
+        return cls(
+            filter=d.get("filter", None),
+            max_results=d.get("max_results", None),
+            order_by=d.get("order_by", None),
+            page_token=d.get("page_token", None),
+            view_type=_enum(d, "view_type", SearchExperimentsViewType),
+        )
 
 
 @dataclass
@@ -3812,31 +4435,37 @@ class SearchExperimentsResponse:
     def as_dict(self) -> dict:
         """Serializes the SearchExperimentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.experiments:
+            body["experiments"] = [v.as_dict() for v in self.experiments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiments: body['experiments'] = self.experiments
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.experiments:
+            body["experiments"] = self.experiments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperimentsResponse:
         """Deserializes the SearchExperimentsResponse from a dictionary."""
-        return cls(experiments=_repeated_dict(d, 'experiments', Experiment),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            experiments=_repeated_dict(d, "experiments", Experiment),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 class SearchExperimentsViewType(Enum):
     """Qualifier for type of experiments to be returned. If unspecified, return only active
     experiments."""
 
-    ACTIVE_ONLY = 'ACTIVE_ONLY'
-    ALL = 'ALL'
-    DELETED_ONLY = 'DELETED_ONLY'
+    ACTIVE_ONLY = "ACTIVE_ONLY"
+    ALL = "ALL"
+    DELETED_ONLY = "DELETED_ONLY"
 
 
 @dataclass
@@ -3850,22 +4479,28 @@ class SearchModelVersionsResponse:
     def as_dict(self) -> dict:
         """Serializes the SearchModelVersionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = [v.as_dict() for v in self.model_versions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_versions: body['model_versions'] = self.model_versions
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = self.model_versions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelVersionsResponse:
         """Deserializes the SearchModelVersionsResponse from a dictionary."""
-        return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            model_versions=_repeated_dict(d, "model_versions", ModelVersion),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -3879,22 +4514,28 @@ class SearchModelsResponse:
     def as_dict(self) -> dict:
         """Serializes the SearchModelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = [v.as_dict() for v in self.registered_models]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = self.registered_models
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = self.registered_models
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelsResponse:
         """Deserializes the SearchModelsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   registered_models=_repeated_dict(d, 'registered_models', Model))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            registered_models=_repeated_dict(d, "registered_models", Model),
+        )
 
 
 @dataclass
@@ -3933,34 +4574,48 @@ class SearchRuns:
     def as_dict(self) -> dict:
         """Serializes the SearchRuns into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_ids: body['experiment_ids'] = [v for v in self.experiment_ids]
-        if self.filter is not None: body['filter'] = self.filter
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.order_by: body['order_by'] = [v for v in self.order_by]
-        if self.page_token is not None: body['page_token'] = self.page_token
-        if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value
+        if self.experiment_ids:
+            body["experiment_ids"] = [v for v in self.experiment_ids]
+        if self.filter is not None:
+            body["filter"] = self.filter
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.order_by:
+            body["order_by"] = [v for v in self.order_by]
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
+        if self.run_view_type is not None:
+            body["run_view_type"] = self.run_view_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchRuns into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_ids: body['experiment_ids'] = self.experiment_ids
-        if self.filter is not None: body['filter'] = self.filter
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.order_by: body['order_by'] = self.order_by
-        if self.page_token is not None: body['page_token'] = self.page_token
-        if self.run_view_type is not None: body['run_view_type'] = self.run_view_type
+        if self.experiment_ids:
+            body["experiment_ids"] = self.experiment_ids
+        if self.filter is not None:
+            body["filter"] = self.filter
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.order_by:
+            body["order_by"] = self.order_by
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
+        if self.run_view_type is not None:
+            body["run_view_type"] = self.run_view_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRuns:
         """Deserializes the SearchRuns from a dictionary."""
-        return cls(experiment_ids=d.get('experiment_ids', None),
-                   filter=d.get('filter', None),
-                   max_results=d.get('max_results', None),
-                   order_by=d.get('order_by', None),
-                   page_token=d.get('page_token', None),
-                   run_view_type=_enum(d, 'run_view_type', SearchRunsRunViewType))
+        return cls(
+            experiment_ids=d.get("experiment_ids", None),
+            filter=d.get("filter", None),
+            max_results=d.get("max_results", None),
+            order_by=d.get("order_by", None),
+            page_token=d.get("page_token", None),
+            run_view_type=_enum(d, "run_view_type", SearchRunsRunViewType),
+        )
 
 
 @dataclass
@@ -3974,29 +4629,36 @@ class SearchRunsResponse:
     def as_dict(self) -> dict:
         """Serializes the SearchRunsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.runs:
+            body["runs"] = [v.as_dict() for v in self.runs]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.runs: body['runs'] = self.runs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.runs:
+            body["runs"] = self.runs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRunsResponse:
         """Deserializes the SearchRunsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), runs=_repeated_dict(d, 'runs', Run))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            runs=_repeated_dict(d, "runs", Run),
+        )
 
 
 class SearchRunsRunViewType(Enum):
     """Whether to display only active, only deleted, or all runs. Defaults to only active runs."""
 
-    ACTIVE_ONLY = 'ACTIVE_ONLY'
-    ALL = 'ALL'
-    DELETED_ONLY = 'DELETED_ONLY'
+    ACTIVE_ONLY = "ACTIVE_ONLY"
+    ALL = "ALL"
+    DELETED_ONLY = "DELETED_ONLY"
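# An illustrative sketch of how the SearchRuns request dataclass above maps onto
# the JSON body sent in an MLflow search-runs call; the experiment ID and filter
# string are placeholders.
req = SearchRuns(
    experiment_ids=["1234567890"],
    filter="metrics.rmse < 0.5",
    max_results=100,
    run_view_type=SearchRunsRunViewType.ACTIVE_ONLY,
)
# as_dict() renders the enum through .value, so the body becomes:
# {'experiment_ids': ['1234567890'], 'filter': 'metrics.rmse < 0.5',
#  'max_results': 100, 'run_view_type': 'ACTIVE_ONLY'}
print(req.as_dict())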
 
 
 @dataclass
@@ -4015,25 +4677,33 @@ class SetExperimentTag:
     def as_dict(self) -> dict:
         """Serializes the SetExperimentTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetExperimentTag:
         """Deserializes the SetExperimentTag from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None),
-                   key=d.get('key', None),
-                   value=d.get('value', None))
+        return cls(
+            experiment_id=d.get("experiment_id", None),
+            key=d.get("key", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -4072,23 +4742,33 @@ class SetModelTagRequest:
     def as_dict(self) -> dict:
         """Serializes the SetModelTagRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelTagRequest:
         """Deserializes the SetModelTagRequest from a dictionary."""
-        return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None))
+        return cls(
+            key=d.get("key", None),
+            name=d.get("name", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -4130,28 +4810,38 @@ class SetModelVersionTagRequest:
     def as_dict(self) -> dict:
         """Serializes the SetModelVersionTagRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
-        if self.version is not None: body['version'] = self.version
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.name is not None: body['name'] = self.name
-        if self.value is not None: body['value'] = self.value
-        if self.version is not None: body['version'] = self.version
+        if self.key is not None:
+            body["key"] = self.key
+        if self.name is not None:
+            body["name"] = self.name
+        if self.value is not None:
+            body["value"] = self.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagRequest:
         """Deserializes the SetModelVersionTagRequest from a dictionary."""
-        return cls(key=d.get('key', None),
-                   name=d.get('name', None),
-                   value=d.get('value', None),
-                   version=d.get('version', None))
+        return cls(
+            key=d.get("key", None),
+            name=d.get("name", None),
+            value=d.get("value", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -4193,28 +4883,38 @@ class SetTag:
     def as_dict(self) -> dict:
         """Serializes the SetTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetTag:
         """Deserializes the SetTag from a dictionary."""
-        return cls(key=d.get('key', None),
-                   run_id=d.get('run_id', None),
-                   run_uuid=d.get('run_uuid', None),
-                   value=d.get('value', None))
+        return cls(
+            key=d.get("key", None),
+            run_id=d.get("run_id", None),
+            run_uuid=d.get("run_uuid", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -4238,32 +4938,32 @@ def from_dict(cls, d: Dict[str, any]) -> SetTagResponse:
 
 class Stage(Enum):
     """Stage of the model version. Valid values are:
-    
+
     * `None`: The initial stage of a model version.
-    
+
     * `Staging`: Staging or pre-production stage.
-    
+
     * `Production`: Production stage.
-    
+
     * `Archived`: Archived stage."""
 
-    ARCHIVED = 'Archived'
-    NONE = 'None'
-    PRODUCTION = 'Production'
-    STAGING = 'Staging'
+    ARCHIVED = "Archived"
+    NONE = "None"
+    PRODUCTION = "Production"
+    STAGING = "Staging"
 
 
 class Status(Enum):
     """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register
     a new model version is pending as server performs background tasks.
-    
+
     * `FAILED_REGISTRATION`: Request to register a new model version has failed.
-    
+
     * `READY`: Model version is ready for use."""
 
-    FAILED_REGISTRATION = 'FAILED_REGISTRATION'
-    PENDING_REGISTRATION = 'PENDING_REGISTRATION'
-    READY = 'READY'
+    FAILED_REGISTRATION = "FAILED_REGISTRATION"
+    PENDING_REGISTRATION = "PENDING_REGISTRATION"
+    READY = "READY"
 
 
 @dataclass
@@ -4279,21 +4979,25 @@ class TestRegistryWebhook:
     def as_dict(self) -> dict:
         """Serializes the TestRegistryWebhook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.body is not None: body['body'] = self.body
-        if self.status_code is not None: body['status_code'] = self.status_code
+        if self.body is not None:
+            body["body"] = self.body
+        if self.status_code is not None:
+            body["status_code"] = self.status_code
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.body is not None: body['body'] = self.body
-        if self.status_code is not None: body['status_code'] = self.status_code
+        if self.body is not None:
+            body["body"] = self.body
+        if self.status_code is not None:
+            body["status_code"] = self.status_code
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhook:
         """Deserializes the TestRegistryWebhook from a dictionary."""
-        return cls(body=d.get('body', None), status_code=d.get('status_code', None))
+        return cls(body=d.get("body", None), status_code=d.get("status_code", None))
 
 
 @dataclass
@@ -4308,21 +5012,25 @@ class TestRegistryWebhookRequest:
     def as_dict(self) -> dict:
         """Serializes the TestRegistryWebhookRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.event is not None: body['event'] = self.event.value
-        if self.id is not None: body['id'] = self.id
+        if self.event is not None:
+            body["event"] = self.event.value
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.event is not None: body['event'] = self.event
-        if self.id is not None: body['id'] = self.id
+        if self.event is not None:
+            body["event"] = self.event
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookRequest:
         """Deserializes the TestRegistryWebhookRequest from a dictionary."""
-        return cls(event=_enum(d, 'event', RegistryWebhookEvent), id=d.get('id', None))
+        return cls(event=_enum(d, "event", RegistryWebhookEvent), id=d.get("id", None))
 
 
 @dataclass
@@ -4333,19 +5041,21 @@ class TestRegistryWebhookResponse:
     def as_dict(self) -> dict:
         """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.webhook: body['webhook'] = self.webhook.as_dict()
+        if self.webhook:
+            body["webhook"] = self.webhook.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.webhook: body['webhook'] = self.webhook
+        if self.webhook:
+            body["webhook"] = self.webhook
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookResponse:
         """Deserializes the TestRegistryWebhookResponse from a dictionary."""
-        return cls(webhook=_from_dict(d, 'webhook', TestRegistryWebhook))
+        return cls(webhook=_from_dict(d, "webhook", TestRegistryWebhook))
 
 
 @dataclass
@@ -4377,32 +5087,42 @@ def as_dict(self) -> dict:
         """Serializes the TransitionModelVersionStageDatabricks into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.archive_existing_versions is not None:
-            body['archive_existing_versions'] = self.archive_existing_versions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage.value
-        if self.version is not None: body['version'] = self.version
+            body["archive_existing_versions"] = self.archive_existing_versions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage.value
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.archive_existing_versions is not None:
-            body['archive_existing_versions'] = self.archive_existing_versions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.stage is not None: body['stage'] = self.stage
-        if self.version is not None: body['version'] = self.version
+            body["archive_existing_versions"] = self.archive_existing_versions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.stage is not None:
+            body["stage"] = self.stage
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionModelVersionStageDatabricks:
         """Deserializes the TransitionModelVersionStageDatabricks from a dictionary."""
-        return cls(archive_existing_versions=d.get('archive_existing_versions', None),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   stage=_enum(d, 'stage', Stage),
-                   version=d.get('version', None))
+        return cls(
+            archive_existing_versions=d.get("archive_existing_versions", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            stage=_enum(d, "stage", Stage),
+            version=d.get("version", None),
+        )
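# An illustrative sketch of building a stage-transition request body with the
# Stage enum defined above; the model name, version, and comment are placeholders.
req = TransitionModelVersionStageDatabricks(
    name="my-registered-model",
    version="3",
    stage=Stage.STAGING,
    archive_existing_versions=False,
    comment="promoting after offline validation",
)
# as_dict() serializes the enum via .value ('Staging'), and the False flag is kept
# because the check is `is not None`, not truthiness:
# {'archive_existing_versions': False, 'comment': 'promoting after offline validation',
#  'name': 'my-registered-model', 'stage': 'Staging', 'version': '3'}
print(req.as_dict())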
 
 
 @dataclass
@@ -4435,31 +5155,43 @@ class TransitionRequest:
     def as_dict(self) -> dict:
         """Serializes the TransitionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.to_stage is not None: body['to_stage'] = self.to_stage.value
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.available_actions:
+            body["available_actions"] = [v.value for v in self.available_actions]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.to_stage is not None:
+            body["to_stage"] = self.to_stage.value
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.available_actions: body['available_actions'] = self.available_actions
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.to_stage is not None: body['to_stage'] = self.to_stage
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.available_actions:
+            body["available_actions"] = self.available_actions
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.to_stage is not None:
+            body["to_stage"] = self.to_stage
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionRequest:
         """Deserializes the TransitionRequest from a dictionary."""
-        return cls(available_actions=_repeated_enum(d, 'available_actions', ActivityAction),
-                   comment=d.get('comment', None),
-                   creation_timestamp=d.get('creation_timestamp', None),
-                   to_stage=_enum(d, 'to_stage', Stage),
-                   user_id=d.get('user_id', None))
+        return cls(
+            available_actions=_repeated_enum(d, "available_actions", ActivityAction),
+            comment=d.get("comment", None),
+            creation_timestamp=d.get("creation_timestamp", None),
+            to_stage=_enum(d, "to_stage", Stage),
+            user_id=d.get("user_id", None),
+        )
 
 
 @dataclass
@@ -4469,19 +5201,21 @@ class TransitionStageResponse:
     def as_dict(self) -> dict:
         """Serializes the TransitionStageResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version.as_dict()
+        if self.model_version:
+            body["model_version"] = self.model_version.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_version: body['model_version'] = self.model_version
+        if self.model_version:
+            body["model_version"] = self.model_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionStageResponse:
         """Deserializes the TransitionStageResponse from a dictionary."""
-        return cls(model_version=_from_dict(d, 'model_version', ModelVersionDatabricks))
+        return cls(model_version=_from_dict(d, "model_version", ModelVersionDatabricks))
 
 
 @dataclass
@@ -4495,21 +5229,25 @@ class UpdateComment:
     def as_dict(self) -> dict:
         """Serializes the UpdateComment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.id is not None: body['id'] = self.id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateComment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.id is not None: body['id'] = self.id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComment:
         """Deserializes the UpdateComment from a dictionary."""
-        return cls(comment=d.get('comment', None), id=d.get('id', None))
+        return cls(comment=d.get("comment", None), id=d.get("id", None))
 
 
 @dataclass
@@ -4520,19 +5258,21 @@ class UpdateCommentResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateCommentResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment: body['comment'] = self.comment.as_dict()
+        if self.comment:
+            body["comment"] = self.comment.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment: body['comment'] = self.comment
+        if self.comment:
+            body["comment"] = self.comment
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCommentResponse:
         """Deserializes the UpdateCommentResponse from a dictionary."""
-        return cls(comment=_from_dict(d, 'comment', CommentObject))
+        return cls(comment=_from_dict(d, "comment", CommentObject))
 
 
 @dataclass
@@ -4546,21 +5286,28 @@ class UpdateExperiment:
     def as_dict(self) -> dict:
         """Serializes the UpdateExperiment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
-        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.experiment_id is not None:
+            body["experiment_id"] = self.experiment_id
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExperiment:
         """Deserializes the UpdateExperiment from a dictionary."""
-        return cls(experiment_id=d.get('experiment_id', None), new_name=d.get('new_name', None))
+        return cls(
+            experiment_id=d.get("experiment_id", None),
+            new_name=d.get("new_name", None),
+        )
 
 
 @dataclass
@@ -4593,21 +5340,25 @@ class UpdateModelRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelRequest:
         """Deserializes the UpdateModelRequest from a dictionary."""
-        return cls(description=d.get('description', None), name=d.get('name', None))
+        return cls(description=d.get("description", None), name=d.get("name", None))
 
 
 @dataclass
@@ -4643,25 +5394,33 @@ class UpdateModelVersionRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.version is not None: body['version'] = self.version
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.version is not None: body['version'] = self.version
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.version is not None:
+            body["version"] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   version=d.get('version', None))
+        return cls(
+            description=d.get("description", None),
+            name=d.get("name", None),
+            version=d.get("version", None),
+        )
 
 
 @dataclass
@@ -4737,34 +5496,48 @@ class UpdateRegistryWebhook:
     def as_dict(self) -> dict:
         """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = [v.value for v in self.events]
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.job_spec: body['job_spec'] = self.job_spec.as_dict()
-        if self.status is not None: body['status'] = self.status.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = [v.value for v in self.events]
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.job_spec:
+            body["job_spec"] = self.job_spec.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.events: body['events'] = self.events
-        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
-        if self.id is not None: body['id'] = self.id
-        if self.job_spec: body['job_spec'] = self.job_spec
-        if self.status is not None: body['status'] = self.status
+        if self.description is not None:
+            body["description"] = self.description
+        if self.events:
+            body["events"] = self.events
+        if self.http_url_spec:
+            body["http_url_spec"] = self.http_url_spec
+        if self.id is not None:
+            body["id"] = self.id
+        if self.job_spec:
+            body["job_spec"] = self.job_spec
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegistryWebhook:
         """Deserializes the UpdateRegistryWebhook from a dictionary."""
-        return cls(description=d.get('description', None),
-                   events=_repeated_enum(d, 'events', RegistryWebhookEvent),
-                   http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec),
-                   id=d.get('id', None),
-                   job_spec=_from_dict(d, 'job_spec', JobSpec),
-                   status=_enum(d, 'status', RegistryWebhookStatus))
+        return cls(
+            description=d.get("description", None),
+            events=_repeated_enum(d, "events", RegistryWebhookEvent),
+            http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec),
+            id=d.get("id", None),
+            job_spec=_from_dict(d, "job_spec", JobSpec),
+            status=_enum(d, "status", RegistryWebhookStatus),
+        )
 
 
 @dataclass
@@ -4785,28 +5558,38 @@ class UpdateRun:
     def as_dict(self) -> dict:
         """Serializes the UpdateRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.status is not None: body['status'] = self.status.value
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time is not None: body['end_time'] = self.end_time
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
-        if self.status is not None: body['status'] = self.status
+        if self.end_time is not None:
+            body["end_time"] = self.end_time
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
+        if self.run_uuid is not None:
+            body["run_uuid"] = self.run_uuid
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRun:
         """Deserializes the UpdateRun from a dictionary."""
-        return cls(end_time=d.get('end_time', None),
-                   run_id=d.get('run_id', None),
-                   run_uuid=d.get('run_uuid', None),
-                   status=_enum(d, 'status', UpdateRunStatus))
+        return cls(
+            end_time=d.get("end_time", None),
+            run_id=d.get("run_id", None),
+            run_uuid=d.get("run_uuid", None),
+            status=_enum(d, "status", UpdateRunStatus),
+        )
 
 
 @dataclass
@@ -4817,29 +5600,31 @@ class UpdateRunResponse:
     def as_dict(self) -> dict:
         """Serializes the UpdateRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.run_info: body['run_info'] = self.run_info.as_dict()
+        if self.run_info:
+            body["run_info"] = self.run_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.run_info: body['run_info'] = self.run_info
+        if self.run_info:
+            body["run_info"] = self.run_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRunResponse:
         """Deserializes the UpdateRunResponse from a dictionary."""
-        return cls(run_info=_from_dict(d, 'run_info', RunInfo))
+        return cls(run_info=_from_dict(d, "run_info", RunInfo))
 
 
 class UpdateRunStatus(Enum):
     """Updated status of the run."""
 
-    FAILED = 'FAILED'
-    FINISHED = 'FINISHED'
-    KILLED = 'KILLED'
-    RUNNING = 'RUNNING'
-    SCHEDULED = 'SCHEDULED'
+    FAILED = "FAILED"
+    FINISHED = "FINISHED"
+    KILLED = "KILLED"
+    RUNNING = "RUNNING"
+    SCHEDULED = "SCHEDULED"
 
 
 @dataclass
@@ -4865,26 +5650,29 @@ class ExperimentsAPI:
     """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each
     experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for
     analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server.
-    
+
     Experiments are located in the workspace file tree. You manage experiments using the same tools you use to
-    manage other workspace objects such as folders, notebooks, and libraries."""
+    manage other workspace objects such as folders, notebooks, and libraries.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create_experiment(self,
-                          name: str,
-                          *,
-                          artifact_location: Optional[str] = None,
-                          tags: Optional[List[ExperimentTag]] = None) -> CreateExperimentResponse:
+    def create_experiment(
+        self,
+        name: str,
+        *,
+        artifact_location: Optional[str] = None,
+        tags: Optional[List[ExperimentTag]] = None,
+    ) -> CreateExperimentResponse:
         """Create experiment.
-        
+
         Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that
         another experiment with the same name does not already exist and fails if another experiment with the
         same name already exists.
-        
+
         Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists.
-        
+
         :param name: str
           Experiment name.
         :param artifact_location: str (optional)
@@ -4895,30 +5683,43 @@ def create_experiment(self,
           depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250
           bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to
           support up to 20 tags per request.
-        
+
         :returns: :class:`CreateExperimentResponse`
         """
         body = {}
-        if artifact_location is not None: body['artifact_location'] = artifact_location
-        if name is not None: body['name'] = name
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/experiments/create', body=body, headers=headers)
+        if artifact_location is not None:
+            body["artifact_location"] = artifact_location
+        if name is not None:
+            body["name"] = name
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/experiments/create",
+            body=body,
+            headers=headers,
+        )
         return CreateExperimentResponse.from_dict(res)
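# A minimal usage sketch for create_experiment, assuming a WorkspaceClient that is
# already configured with credentials; the experiment path and tag are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ExperimentTag

w = WorkspaceClient()
created = w.experiments.create_experiment(
    name="/Users/someone@example.com/sdk-formatting-demo",
    tags=[ExperimentTag(key="team", value="ml-platform")],
)
print(created.experiment_id)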
 
-    def create_run(self,
-                   *,
-                   experiment_id: Optional[str] = None,
-                   start_time: Optional[int] = None,
-                   tags: Optional[List[RunTag]] = None,
-                   user_id: Optional[str] = None) -> CreateRunResponse:
+    def create_run(
+        self,
+        *,
+        experiment_id: Optional[str] = None,
+        start_time: Optional[int] = None,
+        tags: Optional[List[RunTag]] = None,
+        user_id: Optional[str] = None,
+    ) -> CreateRunResponse:
         """Create a run.
-        
+
         Creates a new run within an experiment. A run is usually a single execution of a machine learning or
         data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag`
         associated with a single execution.
-        
+
         :param experiment_id: str (optional)
           ID of the associated experiment.
         :param start_time: int (optional)
@@ -4928,63 +5729,85 @@ def create_run(self,
         :param user_id: str (optional)
           ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in
           a future MLflow release. Use 'mlflow.user' tag instead.
-        
+
         :returns: :class:`CreateRunResponse`
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        if start_time is not None: body['start_time'] = start_time
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        if user_id is not None: body['user_id'] = user_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/runs/create', body=body, headers=headers)
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        if start_time is not None:
+            body["start_time"] = start_time
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        if user_id is not None:
+            body["user_id"] = user_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/mlflow/runs/create", body=body, headers=headers)
         return CreateRunResponse.from_dict(res)
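# A usage sketch for create_run, continuing the create_experiment example above;
# `w` and `created` are the same placeholder objects and the run tag is arbitrary.
import time

run_resp = w.experiments.create_run(
    experiment_id=created.experiment_id,
    start_time=int(time.time() * 1000),  # epoch milliseconds
    tags=[RunTag(key="purpose", value="sdk-example")],
)
# assumes CreateRunResponse.run carries the created Run with its RunInfo
print(run_resp.run.info.run_id)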
 
     def delete_experiment(self, experiment_id: str):
         """Delete an experiment.
-        
+
         Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the
         experiment uses FileStore, artifacts associated with the experiment are also deleted.
-        
+
         :param experiment_id: str
           ID of the associated experiment.
-        
-        
+
+
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/mlflow/experiments/delete', body=body, headers=headers)
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/experiments/delete",
+            body=body,
+            headers=headers,
+        )
 
     def delete_run(self, run_id: str):
         """Delete a run.
-        
+
         Marks a run for deletion.
-        
+
         :param run_id: str
           ID of the run to delete.
-        
-        
+
+
         """
         body = {}
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/mlflow/runs/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/mlflow/runs/delete", body=body, headers=headers)
 
-    def delete_runs(self,
-                    experiment_id: str,
-                    max_timestamp_millis: int,
-                    *,
-                    max_runs: Optional[int] = None) -> DeleteRunsResponse:
+    def delete_runs(
+        self,
+        experiment_id: str,
+        max_timestamp_millis: int,
+        *,
+        max_runs: Optional[int] = None,
+    ) -> DeleteRunsResponse:
         """Delete runs by creation time.
-        
+
         Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at
         most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
         client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete.
-        
+
         :param experiment_id: str
           The ID of the experiment containing the runs to delete.
         :param max_timestamp_millis: int
@@ -4993,91 +5816,130 @@ def delete_runs(self,
         :param max_runs: int (optional)
           An optional positive integer indicating the maximum number of runs to delete. The maximum allowed
           value for max_runs is 10000.
-        
+
         :returns: :class:`DeleteRunsResponse`
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        if max_runs is not None: body['max_runs'] = max_runs
-        if max_timestamp_millis is not None: body['max_timestamp_millis'] = max_timestamp_millis
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/databricks/runs/delete-runs', body=body, headers=headers)
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        if max_runs is not None:
+            body["max_runs"] = max_runs
+        if max_timestamp_millis is not None:
+            body["max_timestamp_millis"] = max_timestamp_millis
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/databricks/runs/delete-runs",
+            body=body,
+            headers=headers,
+        )
         return DeleteRunsResponse.from_dict(res)
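For illustration, a hedged sketch of the bulk-delete call documented above; the experiment ID is a placeholder and the cutoff is simply the current time:

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Delete at most 100 runs created at or before "now" in the given experiment (ID is a placeholder).
    response = w.experiments.delete_runs(
        experiment_id="1234567890",
        max_timestamp_millis=int(time.time() * 1000),
        max_runs=100,
    )
    print(response)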
 
     def delete_tag(self, run_id: str, key: str):
         """Delete a tag.
-        
+
         Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run
         completes.
-        
+
         :param run_id: str
           ID of the run that the tag was logged under. Must be provided.
         :param key: str
           Name of the tag. Maximum size is 255 bytes. Must be provided.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/runs/delete-tag', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/delete-tag",
+            body=body,
+            headers=headers,
+        )
 
     def get_by_name(self, experiment_name: str) -> GetExperimentResponse:
         """Get metadata.
-        
+
         Gets metadata for an experiment.
-        
+
         This endpoint will return deleted experiments, but prefers the active experiment if an active and
         deleted experiment share the same name. If multiple deleted experiments share the same name, the API
         will return one of them.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists.
-        
+
         :param experiment_name: str
           Name of the associated experiment.
-        
+
         :returns: :class:`GetExperimentResponse`
         """
 
         query = {}
-        if experiment_name is not None: query['experiment_name'] = experiment_name
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/mlflow/experiments/get-by-name', query=query, headers=headers)
+        if experiment_name is not None:
+            query["experiment_name"] = experiment_name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/experiments/get-by-name",
+            query=query,
+            headers=headers,
+        )
         return GetExperimentResponse.from_dict(res)
 
     def get_experiment(self, experiment_id: str) -> GetExperimentResponse:
         """Get an experiment.
-        
+
         Gets metadata for an experiment. This method works on deleted experiments.
-        
+
         :param experiment_id: str
           ID of the associated experiment.
-        
+
         :returns: :class:`GetExperimentResponse`
         """
 
         query = {}
-        if experiment_id is not None: query['experiment_id'] = experiment_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/mlflow/experiments/get', query=query, headers=headers)
+        if experiment_id is not None:
+            query["experiment_id"] = experiment_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/experiments/get",
+            query=query,
+            headers=headers,
+        )
         return GetExperimentResponse.from_dict(res)
 
-    def get_history(self,
-                    metric_key: str,
-                    *,
-                    max_results: Optional[int] = None,
-                    page_token: Optional[str] = None,
-                    run_id: Optional[str] = None,
-                    run_uuid: Optional[str] = None) -> Iterator[Metric]:
+    def get_history(
+        self,
+        metric_key: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+    ) -> Iterator[Metric]:
         """Get history of a given metric within a run.
-        
+
         Gets a list of all values for the specified metric for a given run.
-        
+
         :param metric_key: str
           Name of the metric.
         :param max_results: int (optional)
@@ -5090,100 +5952,128 @@ def get_history(self,
         :param run_uuid: str (optional)
           [Deprecated, use run_id instead] ID of the run from which to fetch metric values. This field will be
           removed in a future MLflow version.
-        
+
         :returns: Iterator over :class:`Metric`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if metric_key is not None: query['metric_key'] = metric_key
-        if page_token is not None: query['page_token'] = page_token
-        if run_id is not None: query['run_id'] = run_id
-        if run_uuid is not None: query['run_uuid'] = run_uuid
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if metric_key is not None:
+            query["metric_key"] = metric_key
+        if page_token is not None:
+            query["page_token"] = page_token
+        if run_id is not None:
+            query["run_id"] = run_id
+        if run_uuid is not None:
+            query["run_uuid"] = run_uuid
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/metrics/get-history', query=query, headers=headers)
-            if 'metrics' in json:
-                for v in json['metrics']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/metrics/get-history",
+                query=query,
+                headers=headers,
+            )
+            if "metrics" in json:
+                for v in json["metrics"]:
                     yield Metric.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
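The reformatted generator still follows `next_page_token` transparently, so callers only iterate. A minimal sketch, with placeholder metric key and run ID:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Walk the full metric history for one run; paging is handled by the iterator.
    for metric in w.experiments.get_history(metric_key="rmse", run_id="<run-id>", max_results=100):
        print(metric.step, metric.value, metric.timestamp)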
 
     def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse:
         """Get experiment permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param experiment_id: str
           The experiment for which to get or manage permissions.
-        
+
         :returns: :class:`GetExperimentPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/experiments/{experiment_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/experiments/{experiment_id}/permissionLevels",
+            headers=headers,
+        )
         return GetExperimentPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, experiment_id: str) -> ExperimentPermissions:
         """Get experiment permissions.
-        
+
         Gets the permissions of an experiment. Experiments can inherit permissions from their root object.
-        
+
         :param experiment_id: str
           The experiment for which to get or manage permissions.
-        
+
         :returns: :class:`ExperimentPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/experiments/{experiment_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/experiments/{experiment_id}",
+            headers=headers,
+        )
         return ExperimentPermissions.from_dict(res)
 
     def get_run(self, run_id: str, *, run_uuid: Optional[str] = None) -> GetRunResponse:
         """Get a run.
-        
+
         Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the
         same key are logged for a run, return only the value with the latest timestamp.
-        
+
         If there are multiple values with the latest timestamp, return the maximum of these values.
-        
+
         :param run_id: str
           ID of the run to fetch. Must be provided.
         :param run_uuid: str (optional)
           [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future
           MLflow version.
-        
+
         :returns: :class:`GetRunResponse`
         """
 
         query = {}
-        if run_id is not None: query['run_id'] = run_id
-        if run_uuid is not None: query['run_uuid'] = run_uuid
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/mlflow/runs/get', query=query, headers=headers)
+        if run_id is not None:
+            query["run_id"] = run_id
+        if run_uuid is not None:
+            query["run_uuid"] = run_uuid
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/mlflow/runs/get", query=query, headers=headers)
         return GetRunResponse.from_dict(res)
 
-    def list_artifacts(self,
-                       *,
-                       page_token: Optional[str] = None,
-                       path: Optional[str] = None,
-                       run_id: Optional[str] = None,
-                       run_uuid: Optional[str] = None) -> Iterator[FileInfo]:
+    def list_artifacts(
+        self,
+        *,
+        page_token: Optional[str] = None,
+        path: Optional[str] = None,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+    ) -> Iterator[FileInfo]:
         """Get all artifacts.
-        
+
         List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
         contains only artifacts with the specified prefix. This API does not support pagination when listing
         artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
         `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
         pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-        
+
         :param page_token: str (optional)
           Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
           artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
@@ -5196,35 +6086,48 @@ def list_artifacts(self,
         :param run_uuid: str (optional)
           [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be removed
           in a future MLflow version.
-        
+
         :returns: Iterator over :class:`FileInfo`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        if path is not None: query['path'] = path
-        if run_id is not None: query['run_id'] = run_id
-        if run_uuid is not None: query['run_uuid'] = run_uuid
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        if path is not None:
+            query["path"] = path
+        if run_id is not None:
+            query["run_id"] = run_id
+        if run_uuid is not None:
+            query["run_uuid"] = run_uuid
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/artifacts/list', query=query, headers=headers)
-            if 'files' in json:
-                for v in json['files']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/artifacts/list",
+                query=query,
+                headers=headers,
+            )
+            if "files" in json:
+                for v in json["files"]:
                     yield FileInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
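A short usage sketch for the artifact listing above; the run ID and path prefix are placeholders, and (as the docstring notes) UC Volumes listings are capped rather than paginated:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # List artifacts under the "model" prefix of a run (run ID and prefix are placeholders).
    for artifact in w.experiments.list_artifacts(run_id="<run-id>", path="model"):
        print(artifact.path, artifact.is_dir, artifact.file_size)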
 
-    def list_experiments(self,
-                         *,
-                         max_results: Optional[int] = None,
-                         page_token: Optional[str] = None,
-                         view_type: Optional[str] = None) -> Iterator[Experiment]:
+    def list_experiments(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        view_type: Optional[str] = None,
+    ) -> Iterator[Experiment]:
         """List experiments.
-        
+
         Gets a list of all experiments.
-        
+
         :param max_results: int (optional)
           Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If
           `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are
@@ -5233,68 +6136,80 @@ def list_experiments(self,
           Token indicating the page of experiments to fetch
         :param view_type: str (optional)
           Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-        
+
         :returns: Iterator over :class:`Experiment`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if view_type is not None: query['view_type'] = view_type
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if view_type is not None:
+            query["view_type"] = view_type
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/experiments/list', query=query, headers=headers)
-            if 'experiments' in json:
-                for v in json['experiments']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/experiments/list",
+                query=query,
+                headers=headers,
+            )
+            if "experiments" in json:
+                for v in json["experiments"]:
                     yield Experiment.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def log_batch(self,
-                  *,
-                  metrics: Optional[List[Metric]] = None,
-                  params: Optional[List[Param]] = None,
-                  run_id: Optional[str] = None,
-                  tags: Optional[List[RunTag]] = None):
+            query["page_token"] = json["next_page_token"]
+
+    def log_batch(
+        self,
+        *,
+        metrics: Optional[List[Metric]] = None,
+        params: Optional[List[Param]] = None,
+        run_id: Optional[str] = None,
+        tags: Optional[List[RunTag]] = None,
+    ):
         """Log a batch.
-        
+
         Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server
         will respond with an error (non-200 status code).
-        
+
         In case of error (due to internal server error or an invalid request), partial data may be written.
-        
+
         You can write metrics, params, and tags in an interleaving fashion, but values within a given entity
         type are guaranteed to follow the order specified in the request body.
-        
+
         The overwrite behavior for metrics, params, and tags is as follows:
-        
+
         * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to
         the set of values for the metric with the provided key.
-        
+
         * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple
         tag values with the same key are provided in the same API request, the last-provided tag value is
         written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent.
-        
+
         * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will
         result in an error). However, logging the same param (key, value) is permitted. Specifically, logging
         a param is idempotent.
-        
+
         Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB
         in size and contain:
-        
+
         * No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 params * Up to
         100 tags
-        
+
         For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900
         metrics, 50 params, and 51 tags is invalid.
-        
+
         The following limits also apply to metric, param, and tag keys and values:
-        
+
         * Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter and tag
         values can be up to 250 characters in length
-        
+
         :param metrics: List[:class:`Metric`] (optional)
           Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and
           tags in total.
@@ -5306,51 +6221,80 @@ def log_batch(self,
         :param tags: List[:class:`RunTag`] (optional)
           Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags
           in total.
-        
-        
-        """
-        body = {}
-        if metrics is not None: body['metrics'] = [v.as_dict() for v in metrics]
-        if params is not None: body['params'] = [v.as_dict() for v in params]
-        if run_id is not None: body['run_id'] = run_id
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/mlflow/runs/log-batch', body=body, headers=headers)
 
-    def log_inputs(self, *, datasets: Optional[List[DatasetInput]] = None, run_id: Optional[str] = None):
+        """
+        body = {}
+        if metrics is not None:
+            body["metrics"] = [v.as_dict() for v in metrics]
+        if params is not None:
+            body["params"] = [v.as_dict() for v in params]
+        if run_id is not None:
+            body["run_id"] = run_id
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/log-batch",
+            body=body,
+            headers=headers,
+        )
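Given the request limits spelled out in the docstring (up to 1000 metrics, 100 params, and 100 tags per call), a hedged sketch of a small batch; the run ID and key names are placeholders:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    now_ms = int(time.time() * 1000)
    # One request may mix metrics, params, and tags, subject to the limits above.
    w.experiments.log_batch(
        run_id="<run-id>",
        metrics=[ml.Metric(key="rmse", value=0.42, timestamp=now_ms, step=1)],
        params=[ml.Param(key="model_class", value="LogisticRegression")],
        tags=[ml.RunTag(key="stage", value="dev")],
    )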
+
+    def log_inputs(
+        self,
+        *,
+        datasets: Optional[List[DatasetInput]] = None,
+        run_id: Optional[str] = None,
+    ):
         """Log inputs to a run.
-        
+
         **NOTE:** Experimental: This API may change or be removed in a future release without warning.
-        
+
         :param datasets: List[:class:`DatasetInput`] (optional)
           Dataset inputs
         :param run_id: str (optional)
           ID of the run to log under
-        
-        
-        """
-        body = {}
-        if datasets is not None: body['datasets'] = [v.as_dict() for v in datasets]
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/mlflow/runs/log-inputs', body=body, headers=headers)
 
-    def log_metric(self,
-                   key: str,
-                   value: float,
-                   timestamp: int,
-                   *,
-                   run_id: Optional[str] = None,
-                   run_uuid: Optional[str] = None,
-                   step: Optional[int] = None):
+        """
+        body = {}
+        if datasets is not None:
+            body["datasets"] = [v.as_dict() for v in datasets]
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/log-inputs",
+            body=body,
+            headers=headers,
+        )
+
+    def log_metric(
+        self,
+        key: str,
+        value: float,
+        timestamp: int,
+        *,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+        step: Optional[int] = None,
+    ):
         """Log a metric.
-        
+
         Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated
         timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be
         logged multiple times.
-        
+
         :param key: str
           Name of the metric.
         :param value: float
@@ -5364,51 +6308,77 @@ def log_metric(self,
           removed in a future MLflow version.
         :param step: int (optional)
           Step at which to log the metric
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if run_id is not None: body['run_id'] = run_id
-        if run_uuid is not None: body['run_uuid'] = run_uuid
-        if step is not None: body['step'] = step
-        if timestamp is not None: body['timestamp'] = timestamp
-        if value is not None: body['value'] = value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/runs/log-metric', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if run_id is not None:
+            body["run_id"] = run_id
+        if run_uuid is not None:
+            body["run_uuid"] = run_uuid
+        if step is not None:
+            body["step"] = step
+        if timestamp is not None:
+            body["timestamp"] = timestamp
+        if value is not None:
+            body["value"] = value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/log-metric",
+            body=body,
+            headers=headers,
+        )
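A single-metric variant of the same idea; timestamps are Unix milliseconds and the run ID is a placeholder:

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # The same key can be logged many times; each call appends a (value, timestamp, step) point.
    w.experiments.log_metric(
        key="rmse",
        value=0.39,
        timestamp=int(time.time() * 1000),
        run_id="<run-id>",
        step=2,
    )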
 
     def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = None):
         """Log a model.
-        
+
         **NOTE:** Experimental: This API may change or be removed in a future release without warning.
-        
+
         :param model_json: str (optional)
           MLmodel file in json format.
         :param run_id: str (optional)
           ID of the run to log under
-        
-        
-        """
-        body = {}
-        if model_json is not None: body['model_json'] = model_json
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', '/api/2.0/mlflow/runs/log-model', body=body, headers=headers)
 
-    def log_param(self,
-                  key: str,
-                  value: str,
-                  *,
-                  run_id: Optional[str] = None,
-                  run_uuid: Optional[str] = None):
+        """
+        body = {}
+        if model_json is not None:
+            body["model_json"] = model_json
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/log-model",
+            body=body,
+            headers=headers,
+        )
+
+    def log_param(
+        self,
+        key: str,
+        value: str,
+        *,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+    ):
         """Log a param.
-        
+
         Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include
         hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A
         param can be logged only once for a run.
-        
+
         :param key: str
           Name of the param. Maximum size is 255 bytes.
         :param value: str
@@ -5418,65 +6388,92 @@ def log_param(self,
         :param run_uuid: str (optional)
           [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be
           removed in a future MLflow version.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if run_id is not None: body['run_id'] = run_id
-        if run_uuid is not None: body['run_uuid'] = run_uuid
-        if value is not None: body['value'] = value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/runs/log-parameter', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if run_id is not None:
+            body["run_id"] = run_id
+        if run_uuid is not None:
+            body["run_uuid"] = run_uuid
+        if value is not None:
+            body["value"] = value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/runs/log-parameter",
+            body=body,
+            headers=headers,
+        )
 
     def restore_experiment(self, experiment_id: str):
         """Restores an experiment.
-        
+
         Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics,
         params, and tags. If the experiment uses FileStore, underlying artifacts associated with the
         experiment are also restored.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted.
-        
+
         :param experiment_id: str
           ID of the associated experiment.
-        
-        
+
+
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/mlflow/experiments/restore', body=body, headers=headers)
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/experiments/restore",
+            body=body,
+            headers=headers,
+        )
 
     def restore_run(self, run_id: str):
         """Restore a run.
-        
+
         Restores a deleted run.
-        
+
         :param run_id: str
           ID of the run to restore.
-        
-        
+
+
         """
         body = {}
-        if run_id is not None: body['run_id'] = run_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if run_id is not None:
+            body["run_id"] = run_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/mlflow/runs/restore', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/mlflow/runs/restore", body=body, headers=headers)
 
-    def restore_runs(self,
-                     experiment_id: str,
-                     min_timestamp_millis: int,
-                     *,
-                     max_runs: Optional[int] = None) -> RestoreRunsResponse:
+    def restore_runs(
+        self,
+        experiment_id: str,
+        min_timestamp_millis: int,
+        *,
+        max_runs: Optional[int] = None,
+    ) -> RestoreRunsResponse:
         """Restore runs by deletion time.
-        
+
         Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores
         at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
         client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore.
-        
+
         :param experiment_id: str
           The ID of the experiment containing the runs to restore.
         :param min_timestamp_millis: int
@@ -5485,29 +6482,42 @@ def restore_runs(self,
         :param max_runs: int (optional)
           An optional positive integer indicating the maximum number of runs to restore. The maximum allowed
           value for max_runs is 10000.
-        
+
         :returns: :class:`RestoreRunsResponse`
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        if max_runs is not None: body['max_runs'] = max_runs
-        if min_timestamp_millis is not None: body['min_timestamp_millis'] = min_timestamp_millis
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/databricks/runs/restore-runs', body=body, headers=headers)
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        if max_runs is not None:
+            body["max_runs"] = max_runs
+        if min_timestamp_millis is not None:
+            body["min_timestamp_millis"] = min_timestamp_millis
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/databricks/runs/restore-runs",
+            body=body,
+            headers=headers,
+        )
         return RestoreRunsResponse.from_dict(res)
 
-    def search_experiments(self,
-                           *,
-                           filter: Optional[str] = None,
-                           max_results: Optional[int] = None,
-                           order_by: Optional[List[str]] = None,
-                           page_token: Optional[str] = None,
-                           view_type: Optional[SearchExperimentsViewType] = None) -> Iterator[Experiment]:
+    def search_experiments(
+        self,
+        *,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+        view_type: Optional[SearchExperimentsViewType] = None,
+    ) -> Iterator[Experiment]:
         """Search experiments.
-        
+
         Searches for experiments that satisfy specified search criteria.
-        
+
         :param filter: str (optional)
           String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")
         :param max_results: int (optional)
@@ -5520,52 +6530,67 @@ def search_experiments(self,
           Token indicating the page of experiments to fetch
         :param view_type: :class:`SearchExperimentsViewType` (optional)
           Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-        
+
         :returns: Iterator over :class:`Experiment`
         """
         body = {}
-        if filter is not None: body['filter'] = filter
-        if max_results is not None: body['max_results'] = max_results
-        if order_by is not None: body['order_by'] = [v for v in order_by]
-        if page_token is not None: body['page_token'] = page_token
-        if view_type is not None: body['view_type'] = view_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if filter is not None:
+            body["filter"] = filter
+        if max_results is not None:
+            body["max_results"] = max_results
+        if order_by is not None:
+            body["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            body["page_token"] = page_token
+        if view_type is not None:
+            body["view_type"] = view_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         while True:
-            json = self._api.do('POST', '/api/2.0/mlflow/experiments/search', body=body, headers=headers)
-            if 'experiments' in json:
-                for v in json['experiments']:
+            json = self._api.do(
+                "POST",
+                "/api/2.0/mlflow/experiments/search",
+                body=body,
+                headers=headers,
+            )
+            if "experiments" in json:
+                for v in json["experiments"]:
                     yield Experiment.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            body['page_token'] = json['next_page_token']
-
-    def search_runs(self,
-                    *,
-                    experiment_ids: Optional[List[str]] = None,
-                    filter: Optional[str] = None,
-                    max_results: Optional[int] = None,
-                    order_by: Optional[List[str]] = None,
-                    page_token: Optional[str] = None,
-                    run_view_type: Optional[SearchRunsRunViewType] = None) -> Iterator[Run]:
+            body["page_token"] = json["next_page_token"]
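Usage of the reformatted search generator, sketched with an illustrative SQL-style filter and ordering in the syntax shown in the docstring:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Iterate matching experiments; the iterator follows next_page_token for you.
    for experiment in w.experiments.search_experiments(
        filter="name ILIKE 'sdk-%'",
        order_by=["last_update_time DESC"],
        max_results=50,
    ):
        print(experiment.experiment_id, experiment.name)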
+
+    def search_runs(
+        self,
+        *,
+        experiment_ids: Optional[List[str]] = None,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+        run_view_type: Optional[SearchRunsRunViewType] = None,
+    ) -> Iterator[Run]:
         """Search for runs.
-        
+
         Searches for runs that satisfy expressions.
-        
+
         Search expressions can use `mlflowMetric` and `mlflowParam` keys.
-        
+
         :param experiment_ids: List[str] (optional)
           List of experiment IDs to search over.
         :param filter: str (optional)
           A filter expression over params, metrics, and tags, that allows returning a subset of runs. The
           syntax is a subset of SQL that supports ANDing together binary operations between a param, metric,
           or tag and a constant.
-          
+
           Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'`
-          
+
           You can select columns with special characters (hyphen, space, period, etc.) by using double quotes:
           `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'`
-          
+
           Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.
         :param max_results: int (optional)
           Maximum number of runs desired. Max threshold is 50000
@@ -5579,32 +6604,46 @@ def search_runs(self,
           Token for the current page of runs.
         :param run_view_type: :class:`SearchRunsRunViewType` (optional)
           Whether to display only active, only deleted, or all runs. Defaults to only active runs.
-        
+
         :returns: Iterator over :class:`Run`
         """
         body = {}
-        if experiment_ids is not None: body['experiment_ids'] = [v for v in experiment_ids]
-        if filter is not None: body['filter'] = filter
-        if max_results is not None: body['max_results'] = max_results
-        if order_by is not None: body['order_by'] = [v for v in order_by]
-        if page_token is not None: body['page_token'] = page_token
-        if run_view_type is not None: body['run_view_type'] = run_view_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if experiment_ids is not None:
+            body["experiment_ids"] = [v for v in experiment_ids]
+        if filter is not None:
+            body["filter"] = filter
+        if max_results is not None:
+            body["max_results"] = max_results
+        if order_by is not None:
+            body["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            body["page_token"] = page_token
+        if run_view_type is not None:
+            body["run_view_type"] = run_view_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         while True:
-            json = self._api.do('POST', '/api/2.0/mlflow/runs/search', body=body, headers=headers)
-            if 'runs' in json:
-                for v in json['runs']:
+            json = self._api.do(
+                "POST",
+                "/api/2.0/mlflow/runs/search",
+                body=body,
+                headers=headers,
+            )
+            if "runs" in json:
+                for v in json["runs"]:
                     yield Run.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            body['page_token'] = json['next_page_token']
+            body["page_token"] = json["next_page_token"]
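A hedged sketch of a run search using the filter grammar documented above; the experiment ID is a placeholder, and `SearchRunsRunViewType.ACTIVE_ONLY` is assumed to be the enum member for active runs:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Return active runs with rmse below 1, best first (experiment ID is a placeholder).
    for run in w.experiments.search_runs(
        experiment_ids=["1234567890"],
        filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'",
        order_by=["metrics.rmse ASC"],
        run_view_type=ml.SearchRunsRunViewType.ACTIVE_ONLY,
    ):
        print(run.info.run_id, run.info.status)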
 
     def set_experiment_tag(self, experiment_id: str, key: str, value: str):
         """Set a tag.
-        
+
         Sets a tag on an experiment. Experiment tags are metadata that can be updated.
-        
+
         :param experiment_id: str
           ID of the experiment under which to log the tag. Must be provided.
         :param key: str
@@ -5613,50 +6652,73 @@ def set_experiment_tag(self, experiment_id: str, key: str, value: str):
         :param value: str
           String value of the tag being logged. Maximum size depends on storage backend. All storage backends
           are guaranteed to support key values up to 5000 bytes in size.
-        
-        
+
+
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        if key is not None: body['key'] = key
-        if value is not None: body['value'] = value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/experiments/set-experiment-tag', body=body, headers=headers)
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        if key is not None:
+            body["key"] = key
+        if value is not None:
+            body["value"] = value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/experiments/set-experiment-tag",
+            body=body,
+            headers=headers,
+        )
 
     def set_permissions(
-            self,
-            experiment_id: str,
-            *,
-            access_control_list: Optional[List[ExperimentAccessControlRequest]] = None
+        self,
+        experiment_id: str,
+        *,
+        access_control_list: Optional[List[ExperimentAccessControlRequest]] = None,
     ) -> ExperimentPermissions:
         """Set experiment permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param experiment_id: str
           The experiment for which to get or manage permissions.
         :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ExperimentPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/experiments/{experiment_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/experiments/{experiment_id}",
+            body=body,
+            headers=headers,
+        )
         return ExperimentPermissions.from_dict(res)
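Because `set_permissions` replaces all direct grants, callers usually pass the complete ACL in one call. A sketch, assuming the `ExperimentAccessControlRequest` and `ExperimentPermissionLevel.CAN_READ` names generated in this module; the experiment ID and group are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Replace the experiment's direct permissions with a single read grant (placeholders throughout).
    permissions = w.experiments.set_permissions(
        experiment_id="<experiment-id>",
        access_control_list=[
            ml.ExperimentAccessControlRequest(
                group_name="data-scientists",
                permission_level=ml.ExperimentPermissionLevel.CAN_READ,
            )
        ],
    )
    print(permissions)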
 
-    def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None):
+    def set_tag(
+        self,
+        key: str,
+        value: str,
+        *,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+    ):
         """Set a tag.
-        
+
         Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.
-        
+
         :param key: str
           Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
           support key values up to 250 bytes in size.
@@ -5668,74 +6730,98 @@ def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uui
         :param run_uuid: str (optional)
           [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be
           removed in a future MLflow version.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if run_id is not None: body['run_id'] = run_id
-        if run_uuid is not None: body['run_uuid'] = run_uuid
-        if value is not None: body['value'] = value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/runs/set-tag', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if run_id is not None:
+            body["run_id"] = run_id
+        if run_uuid is not None:
+            body["run_uuid"] = run_uuid
+        if value is not None:
+            body["value"] = value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/mlflow/runs/set-tag", body=body, headers=headers)
 
     def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = None):
         """Update an experiment.
-        
+
         Updates experiment metadata.
-        
+
         :param experiment_id: str
           ID of the associated experiment.
         :param new_name: str (optional)
           If provided, the experiment's name is changed to the new name. The new name must be unique.
-        
-        
+
+
         """
         body = {}
-        if experiment_id is not None: body['experiment_id'] = experiment_id
-        if new_name is not None: body['new_name'] = new_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/experiments/update', body=body, headers=headers)
+        if experiment_id is not None:
+            body["experiment_id"] = experiment_id
+        if new_name is not None:
+            body["new_name"] = new_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/experiments/update",
+            body=body,
+            headers=headers,
+        )
 
     def update_permissions(
-            self,
-            experiment_id: str,
-            *,
-            access_control_list: Optional[List[ExperimentAccessControlRequest]] = None
+        self,
+        experiment_id: str,
+        *,
+        access_control_list: Optional[List[ExperimentAccessControlRequest]] = None,
     ) -> ExperimentPermissions:
         """Update experiment permissions.
-        
+
         Updates the permissions on an experiment. Experiments can inherit permissions from their root object.
-        
+
         :param experiment_id: str
           The experiment for which to get or manage permissions.
         :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ExperimentPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/experiments/{experiment_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/experiments/{experiment_id}",
+            body=body,
+            headers=headers,
+        )
         return ExperimentPermissions.from_dict(res)
 
-    def update_run(self,
-                   *,
-                   end_time: Optional[int] = None,
-                   run_id: Optional[str] = None,
-                   run_uuid: Optional[str] = None,
-                   status: Optional[UpdateRunStatus] = None) -> UpdateRunResponse:
+    def update_run(
+        self,
+        *,
+        end_time: Optional[int] = None,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+        status: Optional[UpdateRunStatus] = None,
+    ) -> UpdateRunResponse:
         """Update a run.
-        
+
         Updates run metadata.
-        
+
         :param end_time: int (optional)
           Unix timestamp in milliseconds of when the run ended.
         :param run_id: str (optional)
@@ -5745,17 +6831,24 @@ def update_run(self,
           MLflow version.
         :param status: :class:`UpdateRunStatus` (optional)
           Updated status of the run.
-        
+
         :returns: :class:`UpdateRunResponse`
         """
         body = {}
-        if end_time is not None: body['end_time'] = end_time
-        if run_id is not None: body['run_id'] = run_id
-        if run_uuid is not None: body['run_uuid'] = run_uuid
-        if status is not None: body['status'] = status.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/runs/update', body=body, headers=headers)
+        if end_time is not None:
+            body["end_time"] = end_time
+        if run_id is not None:
+            body["run_id"] = run_id
+        if run_uuid is not None:
+            body["run_uuid"] = run_uuid
+        if status is not None:
+            body["status"] = status.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/mlflow/runs/update", body=body, headers=headers)
         return UpdateRunResponse.from_dict(res)
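A minimal sketch of closing out a run with the reformatted `update_run`; the run ID is a placeholder and `UpdateRunStatus.FINISHED` is used as the terminal status:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Mark a run as finished and stamp its end time in Unix milliseconds.
    updated = w.experiments.update_run(
        run_id="<run-id>",
        status=ml.UpdateRunStatus.FINISHED,
        end_time=int(time.time() * 1000),
    )
    print(updated)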
 
 
@@ -5764,122 +6857,162 @@ class ModelRegistryAPI:
     [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides
     centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry
     will be deprecated in the future.
-    
+
     The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to
     manage the full lifecycle of MLflow Models."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def approve_transition_request(self,
-                                   name: str,
-                                   version: str,
-                                   stage: Stage,
-                                   archive_existing_versions: bool,
-                                   *,
-                                   comment: Optional[str] = None) -> ApproveTransitionRequestResponse:
+    def approve_transition_request(
+        self,
+        name: str,
+        version: str,
+        stage: Stage,
+        archive_existing_versions: bool,
+        *,
+        comment: Optional[str] = None,
+    ) -> ApproveTransitionRequestResponse:
         """Approve transition request.
-        
+
         Approves a model version stage transition request.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param stage: :class:`Stage`
           Target stage of the transition. Valid values are:
-          
+
           * `None`: The initial stage of a model version.
-          
+
           * `Staging`: Staging or pre-production stage.
-          
+
           * `Production`: Production stage.
-          
+
           * `Archived`: Archived stage.
         :param archive_existing_versions: bool
           Specifies whether to archive all current model versions in the target stage.
         :param comment: str (optional)
           User-provided comment on the action.
-        
+
         :returns: :class:`ApproveTransitionRequestResponse`
         """
         body = {}
         if archive_existing_versions is not None:
-            body['archive_existing_versions'] = archive_existing_versions
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if stage is not None: body['stage'] = stage.value
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/transition-requests/approve', body=body, headers=headers)
+            body["archive_existing_versions"] = archive_existing_versions
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if stage is not None:
+            body["stage"] = stage.value
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/transition-requests/approve",
+            body=body,
+            headers=headers,
+        )
         return ApproveTransitionRequestResponse.from_dict(res)
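A hedged sketch of approving a pending stage transition; the model name and version are placeholders, and `Stage.STAGING` is assumed to be the enum member for the Staging stage:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Approve moving version 1 of a registered model into Staging without archiving other versions.
    approved = w.model_registry.approve_transition_request(
        name="my-registered-model",
        version="1",
        stage=ml.Stage.STAGING,
        archive_existing_versions=False,
        comment="Validation checks passed",
    )
    print(approved)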
 
     def create_comment(self, name: str, version: str, comment: str) -> CreateCommentResponse:
         """Post a comment.
-        
+
         Posts a comment on a model version. A comment can be submitted either by a user or programmatically to
         display relevant information about the model. For example, test results or deployment errors.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param comment: str
           User-provided comment on the action.
-        
+
         :returns: :class:`CreateCommentResponse`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/comments/create', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/comments/create",
+            body=body,
+            headers=headers,
+        )
         return CreateCommentResponse.from_dict(res)
 
-    def create_model(self,
-                     name: str,
-                     *,
-                     description: Optional[str] = None,
-                     tags: Optional[List[ModelTag]] = None) -> CreateModelResponse:
+    def create_model(
+        self,
+        name: str,
+        *,
+        description: Optional[str] = None,
+        tags: Optional[List[ModelTag]] = None,
+    ) -> CreateModelResponse:
         """Create a model.
-        
+
         Creates a new registered model with the name specified in the request body.
-        
+
         Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
-        
+
         :param name: str
           Register models under this name
         :param description: str (optional)
           Optional description for registered model.
         :param tags: List[:class:`ModelTag`] (optional)
           Additional metadata for registered model.
-        
+
         :returns: :class:`CreateModelResponse`
         """
         body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/registered-models/create', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registered-models/create",
+            body=body,
+            headers=headers,
+        )
         return CreateModelResponse.from_dict(res)
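
A minimal sketch of registering a model with tags, assuming an authenticated WorkspaceClient; the name, description, and tag values are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ModelTag

w = WorkspaceClient()

# Register a new model with a description and a single tag.
created = w.model_registry.create_model(
    name="my-model",
    description="Churn prediction model",
    tags=[ModelTag(key="team", value="growth")],
)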
 
-    def create_model_version(self,
-                             name: str,
-                             source: str,
-                             *,
-                             description: Optional[str] = None,
-                             run_id: Optional[str] = None,
-                             run_link: Optional[str] = None,
-                             tags: Optional[List[ModelVersionTag]] = None) -> CreateModelVersionResponse:
+    def create_model_version(
+        self,
+        name: str,
+        source: str,
+        *,
+        description: Optional[str] = None,
+        run_id: Optional[str] = None,
+        run_link: Optional[str] = None,
+        tags: Optional[List[ModelVersionTag]] = None,
+    ) -> CreateModelVersionResponse:
         """Create a model version.
-        
+
         Creates a model version.
-        
+
         :param name: str
           Register model under this name
         :param source: str
@@ -5894,102 +7027,132 @@ def create_model_version(self,
           hosted at another instance of MLflow.
         :param tags: List[:class:`ModelVersionTag`] (optional)
           Additional metadata for model version.
-        
+
         :returns: :class:`CreateModelVersionResponse`
         """
         body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if run_id is not None: body['run_id'] = run_id
-        if run_link is not None: body['run_link'] = run_link
-        if source is not None: body['source'] = source
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/model-versions/create', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if run_id is not None:
+            body["run_id"] = run_id
+        if run_link is not None:
+            body["run_link"] = run_link
+        if source is not None:
+            body["source"] = source
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/model-versions/create",
+            body=body,
+            headers=headers,
+        )
         return CreateModelVersionResponse.from_dict(res)
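
A minimal sketch, assuming an authenticated WorkspaceClient; the model name, run ID, and artifact source URI are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create a new version of "my-model" from artifacts logged under an MLflow run.
mv = w.model_registry.create_model_version(
    name="my-model",
    source="dbfs:/databricks/mlflow-tracking/123/abc456/artifacts/model",
    run_id="abc456",
    description="Retrained on March data",
)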
 
-    def create_transition_request(self,
-                                  name: str,
-                                  version: str,
-                                  stage: Stage,
-                                  *,
-                                  comment: Optional[str] = None) -> CreateTransitionRequestResponse:
+    def create_transition_request(
+        self,
+        name: str,
+        version: str,
+        stage: Stage,
+        *,
+        comment: Optional[str] = None,
+    ) -> CreateTransitionRequestResponse:
         """Make a transition request.
-        
+
         Creates a model version stage transition request.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param stage: :class:`Stage`
           Target stage of the transition. Valid values are:
-          
+
           * `None`: The initial stage of a model version.
-          
+
           * `Staging`: Staging or pre-production stage.
-          
+
           * `Production`: Production stage.
-          
+
           * `Archived`: Archived stage.
         :param comment: str (optional)
           User-provided comment on the action.
-        
+
         :returns: :class:`CreateTransitionRequestResponse`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if stage is not None: body['stage'] = stage.value
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/transition-requests/create', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if stage is not None:
+            body["stage"] = stage.value
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/transition-requests/create",
+            body=body,
+            headers=headers,
+        )
         return CreateTransitionRequestResponse.from_dict(res)
 
-    def create_webhook(self,
-                       events: List[RegistryWebhookEvent],
-                       *,
-                       description: Optional[str] = None,
-                       http_url_spec: Optional[HttpUrlSpec] = None,
-                       job_spec: Optional[JobSpec] = None,
-                       model_name: Optional[str] = None,
-                       status: Optional[RegistryWebhookStatus] = None) -> CreateWebhookResponse:
+    def create_webhook(
+        self,
+        events: List[RegistryWebhookEvent],
+        *,
+        description: Optional[str] = None,
+        http_url_spec: Optional[HttpUrlSpec] = None,
+        job_spec: Optional[JobSpec] = None,
+        model_name: Optional[str] = None,
+        status: Optional[RegistryWebhookStatus] = None,
+    ) -> CreateWebhookResponse:
         """Create a webhook.
-        
+
         **NOTE**: This endpoint is in Public Preview.
-        
+
         Creates a registry webhook.
-        
+
         :param events: List[:class:`RegistryWebhookEvent`]
           Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
           created for the associated model.
-          
+
           * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-          
+
           * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-          
+
           * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-          
+
           * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
           specified for a registry-wide webhook, which can be created by not specifying a model name in the
           create request.
-          
+
           * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-          
+
           * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
           staging.
-          
+
           * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
           production.
-          
+
           * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
         :param description: str (optional)
           User-specified description for the webhook.
@@ -6000,105 +7163,153 @@ def create_webhook(self,
         :param status: :class:`RegistryWebhookStatus` (optional)
           Enable or disable triggering the webhook, or put the webhook into test mode. The default is
           `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-          
+
           * `DISABLED`: Webhook is not triggered.
-          
+
           * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
           event.
-        
+
         :returns: :class:`CreateWebhookResponse`
         """
         body = {}
-        if description is not None: body['description'] = description
-        if events is not None: body['events'] = [v.value for v in events]
-        if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict()
-        if job_spec is not None: body['job_spec'] = job_spec.as_dict()
-        if model_name is not None: body['model_name'] = model_name
-        if status is not None: body['status'] = status.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/registry-webhooks/create', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if events is not None:
+            body["events"] = [v.value for v in events]
+        if http_url_spec is not None:
+            body["http_url_spec"] = http_url_spec.as_dict()
+        if job_spec is not None:
+            body["job_spec"] = job_spec.as_dict()
+        if model_name is not None:
+            body["model_name"] = model_name
+        if status is not None:
+            body["status"] = status.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registry-webhooks/create",
+            body=body,
+            headers=headers,
+        )
         return CreateWebhookResponse.from_dict(res)
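
A minimal sketch of creating an HTTP registry webhook, assuming an authenticated WorkspaceClient; the model name and callback URL are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import HttpUrlSpec, RegistryWebhookEvent

w = WorkspaceClient()

# Fire an HTTP callback whenever a new version of "my-model" is created.
webhook = w.model_registry.create_webhook(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],
    model_name="my-model",
    http_url_spec=HttpUrlSpec(url="https://example.com/hooks/registry"),
    description="Notify CI on new model versions",
)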
 
     def delete_comment(self, id: str):
         """Delete a comment.
-        
+
         Deletes a comment on a model version.
-        
+
         :param id: str
-        
-        
+
+
         """
 
         query = {}
-        if id is not None: query['id'] = id
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/comments/delete', query=query, headers=headers)
+        if id is not None:
+            query["id"] = id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/comments/delete",
+            query=query,
+            headers=headers,
+        )
 
     def delete_model(self, name: str):
         """Delete a model.
-        
+
         Deletes a registered model.
-        
+
         :param name: str
           Registered model unique name identifier.
-        
-        
+
+
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete', query=query, headers=headers)
+        if name is not None:
+            query["name"] = name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/registered-models/delete",
+            query=query,
+            headers=headers,
+        )
 
     def delete_model_tag(self, name: str, key: str):
         """Delete a model tag.
-        
+
         Deletes the tag for a registered model.
-        
+
         :param name: str
           Name of the registered model that the tag was logged under.
         :param key: str
           Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
           is 250 bytes.
-        
-        
+
+
         """
 
         query = {}
-        if key is not None: query['key'] = key
-        if name is not None: query['name'] = name
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/registered-models/delete-tag', query=query, headers=headers)
+        if key is not None:
+            query["key"] = key
+        if name is not None:
+            query["name"] = name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/registered-models/delete-tag",
+            query=query,
+            headers=headers,
+        )
 
     def delete_model_version(self, name: str, version: str):
         """Delete a model version.
-        
+
         Deletes a model version.
-        
+
         :param name: str
           Name of the registered model
         :param version: str
           Model version number
-        
-        
+
+
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/model-versions/delete', query=query, headers=headers)
+        if name is not None:
+            query["name"] = name
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/model-versions/delete",
+            query=query,
+            headers=headers,
+        )
 
     def delete_model_version_tag(self, name: str, version: str, key: str):
         """Delete a model version tag.
-        
+
         Deletes a model version tag.
-        
+
         :param name: str
           Name of the registered model that the tag was logged under.
         :param version: str
@@ -6106,275 +7317,364 @@ def delete_model_version_tag(self, name: str, version: str, key: str):
         :param key: str
           Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
           is 250 bytes.
-        
-        
+
+
         """
 
         query = {}
-        if key is not None: query['key'] = key
-        if name is not None: query['name'] = name
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/model-versions/delete-tag', query=query, headers=headers)
-
-    def delete_transition_request(self,
-                                  name: str,
-                                  version: str,
-                                  stage: DeleteTransitionRequestStage,
-                                  creator: str,
-                                  *,
-                                  comment: Optional[str] = None):
+        if key is not None:
+            query["key"] = key
+        if name is not None:
+            query["name"] = name
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/model-versions/delete-tag",
+            query=query,
+            headers=headers,
+        )
+
+    def delete_transition_request(
+        self,
+        name: str,
+        version: str,
+        stage: DeleteTransitionRequestStage,
+        creator: str,
+        *,
+        comment: Optional[str] = None,
+    ):
         """Delete a transition request.
-        
+
         Cancels a model version stage transition request.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param stage: :class:`DeleteTransitionRequestStage`
           Target stage of the transition request. Valid values are:
-          
+
           * `None`: The initial stage of a model version.
-          
+
           * `Staging`: Staging or pre-production stage.
-          
+
           * `Production`: Production stage.
-          
+
           * `Archived`: Archived stage.
         :param creator: str
           Username of the user who created this request. Of the transition requests matching the specified
           details, only the one transition created by this user will be deleted.
         :param comment: str (optional)
           User-provided comment on the action.
-        
-        
+
+
         """
 
         query = {}
-        if comment is not None: query['comment'] = comment
-        if creator is not None: query['creator'] = creator
-        if name is not None: query['name'] = name
-        if stage is not None: query['stage'] = stage.value
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/transition-requests/delete', query=query, headers=headers)
+        if comment is not None:
+            query["comment"] = comment
+        if creator is not None:
+            query["creator"] = creator
+        if name is not None:
+            query["name"] = name
+        if stage is not None:
+            query["stage"] = stage.value
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/transition-requests/delete",
+            query=query,
+            headers=headers,
+        )
 
     def delete_webhook(self, *, id: Optional[str] = None):
         """Delete a webhook.
-        
+
         **NOTE:** This endpoint is in Public Preview.
-        
+
         Deletes a registry webhook.
-        
+
         :param id: str (optional)
           Webhook ID required to delete a registry webhook.
-        
-        
+
+
         """
 
         query = {}
-        if id is not None: query['id'] = id
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', '/api/2.0/mlflow/registry-webhooks/delete', query=query, headers=headers)
+        if id is not None:
+            query["id"] = id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            "/api/2.0/mlflow/registry-webhooks/delete",
+            query=query,
+            headers=headers,
+        )
 
     def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) -> Iterator[ModelVersion]:
         """Get the latest version.
-        
+
         Gets the latest version of a registered model.
-        
+
         :param name: str
           Registered model unique name identifier.
         :param stages: List[str] (optional)
           List of stages.
-        
+
         :returns: Iterator over :class:`ModelVersion`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if stages is not None: body['stages'] = [v for v in stages]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        json = self._api.do('POST',
-                            '/api/2.0/mlflow/registered-models/get-latest-versions',
-                            body=body,
-                            headers=headers)
+        if name is not None:
+            body["name"] = name
+        if stages is not None:
+            body["stages"] = [v for v in stages]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        json = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registered-models/get-latest-versions",
+            body=body,
+            headers=headers,
+        )
         parsed = GetLatestVersionsResponse.from_dict(json).model_versions
         return parsed if parsed is not None else []
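
A minimal sketch, assuming an authenticated WorkspaceClient; the model name and stage list are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Print the newest version of "my-model" in each of the requested stages.
for mv in w.model_registry.get_latest_versions(name="my-model", stages=["Staging", "Production"]):
    print(mv.version, mv.current_stage)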
 
     def get_model(self, name: str) -> GetModelResponse:
         """Get model.
-        
+
         Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also
         returns the model's Databricks workspace ID and the permission level of the requesting user on the
         model.
-        
+
         [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel
-        
+
         :param name: str
           Registered model unique name identifier.
-        
+
         :returns: :class:`GetModelResponse`
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           '/api/2.0/mlflow/databricks/registered-models/get',
-                           query=query,
-                           headers=headers)
+        if name is not None:
+            query["name"] = name
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/databricks/registered-models/get",
+            query=query,
+            headers=headers,
+        )
         return GetModelResponse.from_dict(res)
 
     def get_model_version(self, name: str, version: str) -> GetModelVersionResponse:
         """Get a model version.
-        
+
         Get a model version.
-        
+
         :param name: str
           Name of the registered model
         :param version: str
           Model version number
-        
+
         :returns: :class:`GetModelVersionResponse`
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/mlflow/model-versions/get', query=query, headers=headers)
+        if name is not None:
+            query["name"] = name
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/model-versions/get",
+            query=query,
+            headers=headers,
+        )
         return GetModelVersionResponse.from_dict(res)
 
     def get_model_version_download_uri(self, name: str, version: str) -> GetModelVersionDownloadUriResponse:
         """Get a model version URI.
-        
+
         Gets a URI to download the model version.
-        
+
         :param name: str
           Name of the registered model
         :param version: str
           Model version number
-        
+
         :returns: :class:`GetModelVersionDownloadUriResponse`
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           '/api/2.0/mlflow/model-versions/get-download-uri',
-                           query=query,
-                           headers=headers)
+        if name is not None:
+            query["name"] = name
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/model-versions/get-download-uri",
+            query=query,
+            headers=headers,
+        )
         return GetModelVersionDownloadUriResponse.from_dict(res)
 
     def get_permission_levels(self, registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse:
         """Get registered model permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
-        
+
         :returns: :class:`GetRegisteredModelPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels",
+            headers=headers,
+        )
         return GetRegisteredModelPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, registered_model_id: str) -> RegisteredModelPermissions:
         """Get registered model permissions.
-        
+
         Gets the permissions of a registered model. Registered models can inherit permissions from their root
         object.
-        
+
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
-        
+
         :returns: :class:`RegisteredModelPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/registered-models/{registered_model_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/registered-models/{registered_model_id}",
+            headers=headers,
+        )
         return RegisteredModelPermissions.from_dict(res)
 
-    def list_models(self,
-                    *,
-                    max_results: Optional[int] = None,
-                    page_token: Optional[str] = None) -> Iterator[Model]:
+    def list_models(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Model]:
         """List models.
-        
+
         Lists all available registered models, up to the limit specified in __max_results__.
-        
+
         :param max_results: int (optional)
           Maximum number of registered models desired. Max threshold is 1000.
         :param page_token: str (optional)
           Pagination token to go to the next page based on a previous query.
-        
+
         :returns: Iterator over :class:`Model`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/registered-models/list', query=query, headers=headers)
-            if 'registered_models' in json:
-                for v in json['registered_models']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/registered-models/list",
+                query=query,
+                headers=headers,
+            )
+            if "registered_models" in json:
+                for v in json["registered_models"]:
                     yield Model.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
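
Since the generator above follows next_page_token internally, callers only iterate; a minimal sketch, assuming an authenticated WorkspaceClient:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Iterate over all registered models; additional pages are fetched on demand.
for model in w.model_registry.list_models(max_results=100):
    print(model.name)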
 
     def list_transition_requests(self, name: str, version: str) -> Iterator[Activity]:
         """List transition requests.
-        
+
         Gets a list of all open stage transition requests for the model version.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
-        
+
         :returns: Iterator over :class:`Activity`
         """
 
         query = {}
-        if name is not None: query['name'] = name
-        if version is not None: query['version'] = version
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET', '/api/2.0/mlflow/transition-requests/list', query=query, headers=headers)
+        if name is not None:
+            query["name"] = name
+        if version is not None:
+            query["version"] = version
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do(
+            "GET",
+            "/api/2.0/mlflow/transition-requests/list",
+            query=query,
+            headers=headers,
+        )
         parsed = ListTransitionRequestsResponse.from_dict(json).requests
         return parsed if parsed is not None else []
 
-    def list_webhooks(self,
-                      *,
-                      events: Optional[List[RegistryWebhookEvent]] = None,
-                      model_name: Optional[str] = None,
-                      page_token: Optional[str] = None) -> Iterator[RegistryWebhook]:
+    def list_webhooks(
+        self,
+        *,
+        events: Optional[List[RegistryWebhookEvent]] = None,
+        model_name: Optional[str] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[RegistryWebhook]:
         """List registry webhooks.
-        
+
         **NOTE:** This endpoint is in Public Preview.
-        
+
         Lists all registry webhooks.
-        
+
         :param events: List[:class:`RegistryWebhookEvent`] (optional)
           If `events` is specified, any webhook with one or more of the specified trigger events is included
           in the output. If `events` is not specified, webhooks of all event types are included in the output.
@@ -6383,94 +7683,130 @@ def list_webhooks(self,
           associated model.
         :param page_token: str (optional)
           Token indicating the page of artifact results to fetch
-        
+
         :returns: Iterator over :class:`RegistryWebhook`
         """
 
         query = {}
-        if events is not None: query['events'] = [v.value for v in events]
-        if model_name is not None: query['model_name'] = model_name
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if events is not None:
+            query["events"] = [v.value for v in events]
+        if model_name is not None:
+            query["model_name"] = model_name
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/registry-webhooks/list', query=query, headers=headers)
-            if 'webhooks' in json:
-                for v in json['webhooks']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/registry-webhooks/list",
+                query=query,
+                headers=headers,
+            )
+            if "webhooks" in json:
+                for v in json["webhooks"]:
                     yield RegistryWebhook.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def reject_transition_request(self,
-                                  name: str,
-                                  version: str,
-                                  stage: Stage,
-                                  *,
-                                  comment: Optional[str] = None) -> RejectTransitionRequestResponse:
+            query["page_token"] = json["next_page_token"]
+
+    def reject_transition_request(
+        self,
+        name: str,
+        version: str,
+        stage: Stage,
+        *,
+        comment: Optional[str] = None,
+    ) -> RejectTransitionRequestResponse:
         """Reject a transition request.
-        
+
         Rejects a model version stage transition request.
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param stage: :class:`Stage`
           Target stage of the transition. Valid values are:
-          
+
           * `None`: The initial stage of a model version.
-          
+
           * `Staging`: Staging or pre-production stage.
-          
+
           * `Production`: Production stage.
-          
+
           * `Archived`: Archived stage.
         :param comment: str (optional)
           User-provided comment on the action.
-        
+
         :returns: :class:`RejectTransitionRequestResponse`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if stage is not None: body['stage'] = stage.value
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/transition-requests/reject', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if stage is not None:
+            body["stage"] = stage.value
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/transition-requests/reject",
+            body=body,
+            headers=headers,
+        )
         return RejectTransitionRequestResponse.from_dict(res)
 
     def rename_model(self, name: str, *, new_name: Optional[str] = None) -> RenameModelResponse:
         """Rename a model.
-        
+
         Renames a registered model.
-        
+
         :param name: str
           Registered model unique name identifier.
         :param new_name: str (optional)
           If provided, updates the name for this `registered_model`.
-        
+
         :returns: :class:`RenameModelResponse`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if new_name is not None: body['new_name'] = new_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/registered-models/rename', body=body, headers=headers)
+        if name is not None:
+            body["name"] = name
+        if new_name is not None:
+            body["new_name"] = new_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registered-models/rename",
+            body=body,
+            headers=headers,
+        )
         return RenameModelResponse.from_dict(res)
 
-    def search_model_versions(self,
-                              *,
-                              filter: Optional[str] = None,
-                              max_results: Optional[int] = None,
-                              order_by: Optional[List[str]] = None,
-                              page_token: Optional[str] = None) -> Iterator[ModelVersion]:
+    def search_model_versions(
+        self,
+        *,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ModelVersion]:
         """Searches model versions.
-        
+
         Searches for specific model versions based on the supplied __filter__.
-        
+
         :param filter: str (optional)
           String filter condition, like "name='my-model-name'". Must be a single boolean condition, with
           string values wrapped in single quotes.
@@ -6482,36 +7818,49 @@ def search_model_versions(self,
           timestamp, followed by name ASC, followed by version DESC.
         :param page_token: str (optional)
           Pagination token to go to next page based on previous search query.
-        
+
         :returns: Iterator over :class:`ModelVersion`
         """
 
         query = {}
-        if filter is not None: query['filter'] = filter
-        if max_results is not None: query['max_results'] = max_results
-        if order_by is not None: query['order_by'] = [v for v in order_by]
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if filter is not None:
+            query["filter"] = filter
+        if max_results is not None:
+            query["max_results"] = max_results
+        if order_by is not None:
+            query["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/mlflow/model-versions/search', query=query, headers=headers)
-            if 'model_versions' in json:
-                for v in json['model_versions']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/model-versions/search",
+                query=query,
+                headers=headers,
+            )
+            if "model_versions" in json:
+                for v in json["model_versions"]:
                     yield ModelVersion.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def search_models(self,
-                      *,
-                      filter: Optional[str] = None,
-                      max_results: Optional[int] = None,
-                      order_by: Optional[List[str]] = None,
-                      page_token: Optional[str] = None) -> Iterator[Model]:
+            query["page_token"] = json["next_page_token"]
+
+    def search_models(
+        self,
+        *,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Model]:
         """Search models.
-        
+
         Search for registered models based on the specified __filter__.
-        
+
         :param filter: str (optional)
           String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically
           as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single
@@ -6524,34 +7873,42 @@ def search_models(self,
           name ASC.
         :param page_token: str (optional)
           Pagination token to go to the next page based on a previous search query.
-        
+
         :returns: Iterator over :class:`Model`
         """
 
         query = {}
-        if filter is not None: query['filter'] = filter
-        if max_results is not None: query['max_results'] = max_results
-        if order_by is not None: query['order_by'] = [v for v in order_by]
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if filter is not None:
+            query["filter"] = filter
+        if max_results is not None:
+            query["max_results"] = max_results
+        if order_by is not None:
+            query["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                '/api/2.0/mlflow/registered-models/search',
-                                query=query,
-                                headers=headers)
-            if 'registered_models' in json:
-                for v in json['registered_models']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/mlflow/registered-models/search",
+                query=query,
+                headers=headers,
+            )
+            if "registered_models" in json:
+                for v in json["registered_models"]:
                     yield Model.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
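
A minimal sketch of the filter syntax described above, assuming an authenticated WorkspaceClient; the search term is a placeholder:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Find registered models whose name contains "churn"; the backend expands the
# pattern to "name LIKE '%churn%'".
for model in w.model_registry.search_models(filter="name LIKE 'churn'", max_results=10):
    print(model.name)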
 
     def set_model_tag(self, name: str, key: str, value: str):
         """Set a tag.
-        
+
         Sets a tag on a registered model.
-        
+
         :param name: str
           Unique name of the model.
         :param key: str
@@ -6561,22 +7918,33 @@ def set_model_tag(self, name: str, key: str, value: str):
         :param value: str
           String value of the tag being logged. Maximum size depends on storage backend. All storage backends
           are guaranteed to support key values up to 5000 bytes in size.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if name is not None: body['name'] = name
-        if value is not None: body['value'] = value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/registered-models/set-tag', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if name is not None:
+            body["name"] = name
+        if value is not None:
+            body["value"] = value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registered-models/set-tag",
+            body=body,
+            headers=headers,
+        )
 
     def set_model_version_tag(self, name: str, version: str, key: str, value: str):
         """Set a version tag.
-        
+
         Sets a model version tag.
-        
+
         :param name: str
           Unique name of the model.
         :param version: str
@@ -6588,225 +7956,300 @@ def set_model_version_tag(self, name: str, version: str, key: str, value: str):
         :param value: str
           String value of the tag being logged. Maximum size depends on storage backend. All storage backends
           are guaranteed to support key values up to 5000 bytes in size.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if name is not None: body['name'] = name
-        if value is not None: body['value'] = value
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/mlflow/model-versions/set-tag', body=body, headers=headers)
+        if key is not None:
+            body["key"] = key
+        if name is not None:
+            body["name"] = name
+        if value is not None:
+            body["value"] = value
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/mlflow/model-versions/set-tag",
+            body=body,
+            headers=headers,
+        )
 
     def set_permissions(
         self,
         registered_model_id: str,
         *,
-        access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None
+        access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None,
     ) -> RegisteredModelPermissions:
         """Set registered model permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
         :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-        
+
         :returns: :class:`RegisteredModelPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/registered-models/{registered_model_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/registered-models/{registered_model_id}",
+            body=body,
+            headers=headers,
+        )
         return RegisteredModelPermissions.from_dict(res)
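
A minimal sketch, assuming an authenticated WorkspaceClient; the registered model ID and group name are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import (
    RegisteredModelAccessControlRequest,
    RegisteredModelPermissionLevel,
)

w = WorkspaceClient()

# Replace all direct grants on the model with read access for one group.
perms = w.model_registry.set_permissions(
    registered_model_id="abc123",
    access_control_list=[
        RegisteredModelAccessControlRequest(
            group_name="data-scientists",
            permission_level=RegisteredModelPermissionLevel.CAN_READ,
        )
    ],
)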
 
-    def test_registry_webhook(self,
-                              id: str,
-                              *,
-                              event: Optional[RegistryWebhookEvent] = None) -> TestRegistryWebhookResponse:
+    def test_registry_webhook(
+        self, id: str, *, event: Optional[RegistryWebhookEvent] = None
+    ) -> TestRegistryWebhookResponse:
         """Test a webhook.
-        
+
         **NOTE:** This endpoint is in Public Preview.
-        
+
         Tests a registry webhook.
-        
+
         :param id: str
           Webhook ID
         :param event: :class:`RegistryWebhookEvent` (optional)
           If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the
           test trigger uses a randomly chosen event associated with the webhook.
-        
+
         :returns: :class:`TestRegistryWebhookResponse`
         """
         body = {}
-        if event is not None: body['event'] = event.value
-        if id is not None: body['id'] = id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/mlflow/registry-webhooks/test', body=body, headers=headers)
+        if event is not None:
+            body["event"] = event.value
+        if id is not None:
+            body["id"] = id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/registry-webhooks/test",
+            body=body,
+            headers=headers,
+        )
         return TestRegistryWebhookResponse.from_dict(res)
 
-    def transition_stage(self,
-                         name: str,
-                         version: str,
-                         stage: Stage,
-                         archive_existing_versions: bool,
-                         *,
-                         comment: Optional[str] = None) -> TransitionStageResponse:
+    def transition_stage(
+        self,
+        name: str,
+        version: str,
+        stage: Stage,
+        archive_existing_versions: bool,
+        *,
+        comment: Optional[str] = None,
+    ) -> TransitionStageResponse:
         """Transition a stage.
-        
+
         Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint]
         that also accepts a comment associated with the transition to be recorded.
-        
+
         [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
-        
+
         :param name: str
           Name of the model.
         :param version: str
           Version of the model.
         :param stage: :class:`Stage`
           Target stage of the transition. Valid values are:
-          
+
           * `None`: The initial stage of a model version.
-          
+
           * `Staging`: Staging or pre-production stage.
-          
+
           * `Production`: Production stage.
-          
+
           * `Archived`: Archived stage.
         :param archive_existing_versions: bool
           Specifies whether to archive all current model versions in the target stage.
         :param comment: str (optional)
           User-provided comment on the action.
-        
+
         :returns: :class:`TransitionStageResponse`
         """
         body = {}
         if archive_existing_versions is not None:
-            body['archive_existing_versions'] = archive_existing_versions
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if stage is not None: body['stage'] = stage.value
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           '/api/2.0/mlflow/databricks/model-versions/transition-stage',
-                           body=body,
-                           headers=headers)
+            body["archive_existing_versions"] = archive_existing_versions
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if stage is not None:
+            body["stage"] = stage.value
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/mlflow/databricks/model-versions/transition-stage",
+            body=body,
+            headers=headers,
+        )
         return TransitionStageResponse.from_dict(res)
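
A minimal sketch, assuming an authenticated WorkspaceClient; the model name and version are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import Stage

w = WorkspaceClient()

# Move version "1" of "my-model" directly to Staging, without a transition request.
w.model_registry.transition_stage(
    name="my-model",
    version="1",
    stage=Stage.STAGING,
    archive_existing_versions=False,
    comment="Promoting candidate for integration testing",
)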
 
     def update_comment(self, id: str, comment: str) -> UpdateCommentResponse:
         """Update a comment.
-        
+
         Post an edit to a comment on a model version.
-        
+
         :param id: str
           Unique identifier of an activity
         :param comment: str
           User-provided comment on the action.
-        
+
         :returns: :class:`UpdateCommentResponse`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if id is not None: body['id'] = id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', '/api/2.0/mlflow/comments/update', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if id is not None:
+            body["id"] = id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/mlflow/comments/update",
+            body=body,
+            headers=headers,
+        )
         return UpdateCommentResponse.from_dict(res)
 
     def update_model(self, name: str, *, description: Optional[str] = None):
         """Update model.
-        
+
         Updates a registered model.
-        
+
         :param name: str
           Registered model unique name identifier.
         :param description: str (optional)
           If provided, updates the description for this `registered_model`.
-        
-        
+
+
         """
         body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', '/api/2.0/mlflow/registered-models/update', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            "/api/2.0/mlflow/registered-models/update",
+            body=body,
+            headers=headers,
+        )
 
     def update_model_version(self, name: str, version: str, *, description: Optional[str] = None):
         """Update model version.
-        
+
         Updates the model version.
-        
+
         :param name: str
           Name of the registered model
         :param version: str
           Model version number
         :param description: str (optional)
           If provided, updates the description for this model version.
-        
-        
+
+
         """
         body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if version is not None: body['version'] = version
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', '/api/2.0/mlflow/model-versions/update', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if version is not None:
+            body["version"] = version
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            "/api/2.0/mlflow/model-versions/update",
+            body=body,
+            headers=headers,
+        )
 
     def update_permissions(
         self,
         registered_model_id: str,
         *,
-        access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None
+        access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None,
     ) -> RegisteredModelPermissions:
         """Update registered model permissions.
-        
+
         Updates the permissions on a registered model. Registered models can inherit permissions from their
         root object.
-        
+
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
         :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-        
+
         :returns: :class:`RegisteredModelPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/registered-models/{registered_model_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/registered-models/{registered_model_id}",
+            body=body,
+            headers=headers,
+        )
         return RegisteredModelPermissions.from_dict(res)
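
# Usage sketch (not part of this diff): granting a group read access on a registered
# model via update_permissions. The registered model id and group name are
# illustrative, and the CAN_READ member of RegisteredModelPermissionLevel is an
# assumption; check the enum in your SDK version for the available levels.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import (
    RegisteredModelAccessControlRequest,
    RegisteredModelPermissionLevel,
)

w = WorkspaceClient()
w.model_registry.update_permissions(
    registered_model_id="d3b07384d113edec49eaa6238ad5ff00",
    access_control_list=[
        RegisteredModelAccessControlRequest(
            group_name="data-scientists",
            permission_level=RegisteredModelPermissionLevel.CAN_READ,
        )
    ],
)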
 
-    def update_webhook(self,
-                       id: str,
-                       *,
-                       description: Optional[str] = None,
-                       events: Optional[List[RegistryWebhookEvent]] = None,
-                       http_url_spec: Optional[HttpUrlSpec] = None,
-                       job_spec: Optional[JobSpec] = None,
-                       status: Optional[RegistryWebhookStatus] = None):
+    def update_webhook(
+        self,
+        id: str,
+        *,
+        description: Optional[str] = None,
+        events: Optional[List[RegistryWebhookEvent]] = None,
+        http_url_spec: Optional[HttpUrlSpec] = None,
+        job_spec: Optional[JobSpec] = None,
+        status: Optional[RegistryWebhookStatus] = None,
+    ):
         """Update a webhook.
-        
+
         **NOTE:** This endpoint is in Public Preview.
-        
+
         Updates a registry webhook.
-        
+
         :param id: str
           Webhook ID
         :param description: str (optional)
@@ -6814,52 +8257,66 @@ def update_webhook(self,
         :param events: List[:class:`RegistryWebhookEvent`] (optional)
           Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
           created for the associated model.
-          
+
           * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-          
+
           * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-          
+
           * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-          
+
           * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
           specified for a registry-wide webhook, which can be created by not specifying a model name in the
           create request.
-          
+
           * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-          
+
           * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-          
+
           * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
           staging.
-          
+
           * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
           production.
-          
+
           * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
         :param http_url_spec: :class:`HttpUrlSpec` (optional)
         :param job_spec: :class:`JobSpec` (optional)
         :param status: :class:`RegistryWebhookStatus` (optional)
           Enable or disable triggering the webhook, or put the webhook into test mode. The default is
           `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-          
+
           * `DISABLED`: Webhook is not triggered.
-          
+
           * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
           event.
-        
-        
+
+
         """
         body = {}
-        if description is not None: body['description'] = description
-        if events is not None: body['events'] = [v.value for v in events]
-        if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict()
-        if id is not None: body['id'] = id
-        if job_spec is not None: body['job_spec'] = job_spec.as_dict()
-        if status is not None: body['status'] = status.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', '/api/2.0/mlflow/registry-webhooks/update', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if events is not None:
+            body["events"] = [v.value for v in events]
+        if http_url_spec is not None:
+            body["http_url_spec"] = http_url_spec.as_dict()
+        if id is not None:
+            body["id"] = id
+        if job_spec is not None:
+            body["job_spec"] = job_spec.as_dict()
+        if status is not None:
+            body["status"] = status.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            "/api/2.0/mlflow/registry-webhooks/update",
+            body=body,
+            headers=headers,
+        )
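
# Usage sketch (not part of this diff): putting a registry webhook into test mode and
# then re-enabling it. The ACTIVE and TEST_MODE statuses are taken from the
# update_webhook docstring above; the webhook id is illustrative.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import RegistryWebhookStatus

w = WorkspaceClient()
w.model_registry.update_webhook(id="1234567890", status=RegistryWebhookStatus.TEST_MODE)
w.model_registry.update_webhook(id="1234567890", status=RegistryWebhookStatus.ACTIVE)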
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 37d464af6..92e7f6075 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -8,7 +8,7 @@
 
 from ._internal import _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -38,35 +38,48 @@ class CreateCustomAppIntegration:
     def as_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.confidential is not None: body['confidential'] = self.confidential
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.confidential is not None:
+            body["confidential"] = self.confidential
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = [v for v in self.redirect_urls]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         if self.user_authorized_scopes:
-            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+            body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.confidential is not None: body['confidential'] = self.confidential
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
-        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
+        if self.confidential is not None:
+            body["confidential"] = self.confidential
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = self.redirect_urls
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
+        if self.user_authorized_scopes:
+            body["user_authorized_scopes"] = self.user_authorized_scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration:
         """Deserializes the CreateCustomAppIntegration from a dictionary."""
-        return cls(confidential=d.get('confidential', None),
-                   name=d.get('name', None),
-                   redirect_urls=d.get('redirect_urls', None),
-                   scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
-                   user_authorized_scopes=d.get('user_authorized_scopes', None))
+        return cls(
+            confidential=d.get("confidential", None),
+            name=d.get("name", None),
+            redirect_urls=d.get("redirect_urls", None),
+            scopes=d.get("scopes", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+            user_authorized_scopes=d.get("user_authorized_scopes", None),
+        )
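
# Usage sketch (not part of this diff): the as_dict()/from_dict() pair reformatted
# throughout this file round-trips a dataclass through the JSON shape used as a
# request body. Field values here are illustrative.
from databricks.sdk.service.oauth2 import CreateCustomAppIntegration, TokenAccessPolicy

req = CreateCustomAppIntegration(
    name="my-app",
    confidential=True,
    redirect_urls=["https://example.com/callback"],
    scopes=["all-apis"],
    token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=60),
)
body = req.as_dict()  # nested dataclasses are serialized recursively
assert CreateCustomAppIntegration.from_dict(body).name == "my-app"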
 
 
 @dataclass
@@ -84,25 +97,33 @@ class CreateCustomAppIntegrationOutput:
     def as_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.client_secret is not None: body['client_secret'] = self.client_secret
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.client_secret is not None:
+            body["client_secret"] = self.client_secret
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.client_secret is not None: body['client_secret'] = self.client_secret
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.client_secret is not None:
+            body["client_secret"] = self.client_secret
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput:
         """Deserializes the CreateCustomAppIntegrationOutput from a dictionary."""
-        return cls(client_id=d.get('client_id', None),
-                   client_secret=d.get('client_secret', None),
-                   integration_id=d.get('integration_id', None))
+        return cls(
+            client_id=d.get("client_id", None),
+            client_secret=d.get("client_secret", None),
+            integration_id=d.get("integration_id", None),
+        )
 
 
 @dataclass
@@ -116,22 +137,28 @@ class CreatePublishedAppIntegration:
     def as_dict(self) -> dict:
         """Serializes the CreatePublishedAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration:
         """Deserializes the CreatePublishedAppIntegration from a dictionary."""
-        return cls(app_id=d.get('app_id', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+        return cls(
+            app_id=d.get("app_id", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+        )
 
 
 @dataclass
@@ -142,19 +169,21 @@ class CreatePublishedAppIntegrationOutput:
     def as_dict(self) -> dict:
         """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegrationOutput:
         """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary."""
-        return cls(integration_id=d.get('integration_id', None))
+        return cls(integration_id=d.get("integration_id", None))
 
 
 @dataclass
@@ -180,34 +209,48 @@ class CreateServicePrincipalSecretResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateServicePrincipalSecretResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret is not None: body['secret'] = self.secret
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret is not None:
+            body["secret"] = self.secret
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret is not None: body['secret'] = self.secret
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret is not None:
+            body["secret"] = self.secret
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse:
         """Deserializes the CreateServicePrincipalSecretResponse from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   id=d.get('id', None),
-                   secret=d.get('secret', None),
-                   secret_hash=d.get('secret_hash', None),
-                   status=d.get('status', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            id=d.get("id", None),
+            secret=d.get("secret", None),
+            secret_hash=d.get("secret_hash", None),
+            status=d.get("status", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -296,34 +339,48 @@ class FederationPolicy:
     def as_dict(self) -> dict:
         """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict()
-        if self.uid is not None: body['uid'] = self.uid
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.oidc_policy:
+            body["oidc_policy"] = self.oidc_policy.as_dict()
+        if self.uid is not None:
+            body["uid"] = self.uid
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy
-        if self.uid is not None: body['uid'] = self.uid
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.oidc_policy:
+            body["oidc_policy"] = self.oidc_policy
+        if self.uid is not None:
+            body["uid"] = self.uid
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FederationPolicy:
         """Deserializes the FederationPolicy from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   description=d.get('description', None),
-                   name=d.get('name', None),
-                   oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy),
-                   uid=d.get('uid', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            description=d.get("description", None),
+            name=d.get("name", None),
+            oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy),
+            uid=d.get("uid", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -361,50 +418,73 @@ class GetCustomAppIntegrationOutput:
     def as_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.confidential is not None: body['confidential'] = self.confidential
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.creator_username is not None: body['creator_username'] = self.creator_username
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.confidential is not None:
+            body["confidential"] = self.confidential
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.creator_username is not None:
+            body["creator_username"] = self.creator_username
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = [v for v in self.redirect_urls]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         if self.user_authorized_scopes:
-            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+            body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.confidential is not None: body['confidential'] = self.confidential
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.creator_username is not None: body['creator_username'] = self.creator_username
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
-        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.confidential is not None:
+            body["confidential"] = self.confidential
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.creator_username is not None:
+            body["creator_username"] = self.creator_username
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = self.redirect_urls
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
+        if self.user_authorized_scopes:
+            body["user_authorized_scopes"] = self.user_authorized_scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput:
         """Deserializes the GetCustomAppIntegrationOutput from a dictionary."""
-        return cls(client_id=d.get('client_id', None),
-                   confidential=d.get('confidential', None),
-                   create_time=d.get('create_time', None),
-                   created_by=d.get('created_by', None),
-                   creator_username=d.get('creator_username', None),
-                   integration_id=d.get('integration_id', None),
-                   name=d.get('name', None),
-                   redirect_urls=d.get('redirect_urls', None),
-                   scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
-                   user_authorized_scopes=d.get('user_authorized_scopes', None))
+        return cls(
+            client_id=d.get("client_id", None),
+            confidential=d.get("confidential", None),
+            create_time=d.get("create_time", None),
+            created_by=d.get("created_by", None),
+            creator_username=d.get("creator_username", None),
+            integration_id=d.get("integration_id", None),
+            name=d.get("name", None),
+            redirect_urls=d.get("redirect_urls", None),
+            scopes=d.get("scopes", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+            user_authorized_scopes=d.get("user_authorized_scopes", None),
+        )
 
 
 @dataclass
@@ -417,22 +497,28 @@ class GetCustomAppIntegrationsOutput:
     def as_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apps: body['apps'] = self.apps
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = self.apps
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput:
         """Deserializes the GetCustomAppIntegrationsOutput from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            apps=_repeated_dict(d, "apps", GetCustomAppIntegrationOutput),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -456,34 +542,48 @@ class GetPublishedAppIntegrationOutput:
     def as_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.name is not None: body['name'] = self.name
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.name is not None: body['name'] = self.name
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput:
         """Deserializes the GetPublishedAppIntegrationOutput from a dictionary."""
-        return cls(app_id=d.get('app_id', None),
-                   create_time=d.get('create_time', None),
-                   created_by=d.get('created_by', None),
-                   integration_id=d.get('integration_id', None),
-                   name=d.get('name', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+        return cls(
+            app_id=d.get("app_id", None),
+            create_time=d.get("create_time", None),
+            created_by=d.get("created_by", None),
+            integration_id=d.get("integration_id", None),
+            name=d.get("name", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+        )
 
 
 @dataclass
@@ -496,22 +596,28 @@ class GetPublishedAppIntegrationsOutput:
     def as_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apps: body['apps'] = self.apps
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = self.apps
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput:
         """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            apps=_repeated_dict(d, "apps", GetPublishedAppIntegrationOutput),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -526,22 +632,28 @@ class GetPublishedAppsOutput:
     def as_dict(self) -> dict:
         """Serializes the GetPublishedAppsOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apps: body['apps'] = [v.as_dict() for v in self.apps]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = [v.as_dict() for v in self.apps]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apps: body['apps'] = self.apps
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.apps:
+            body["apps"] = self.apps
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
         """Deserializes the GetPublishedAppsOutput from a dictionary."""
-        return cls(apps=_repeated_dict(d, 'apps', PublishedAppOutput),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            apps=_repeated_dict(d, "apps", PublishedAppOutput),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -553,22 +665,28 @@ class ListFederationPoliciesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policies:
+            body["policies"] = [v.as_dict() for v in self.policies]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.policies: body['policies'] = self.policies
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.policies:
+            body["policies"] = self.policies
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFederationPoliciesResponse:
         """Deserializes the ListFederationPoliciesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   policies=_repeated_dict(d, 'policies', FederationPolicy))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            policies=_repeated_dict(d, "policies", FederationPolicy),
+        )
 
 
 @dataclass
@@ -582,22 +700,28 @@ class ListServicePrincipalSecretsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.secrets:
+            body["secrets"] = [v.as_dict() for v in self.secrets]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.secrets: body['secrets'] = self.secrets
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.secrets:
+            body["secrets"] = self.secrets
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
         """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   secrets=_repeated_dict(d, 'secrets', SecretInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            secrets=_repeated_dict(d, "secrets", SecretInfo),
+        )
 
 
 @dataclass
@@ -631,31 +755,43 @@ class OidcFederationPolicy:
     def as_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.audiences: body['audiences'] = [v for v in self.audiences]
-        if self.issuer is not None: body['issuer'] = self.issuer
-        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
-        if self.subject is not None: body['subject'] = self.subject
-        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        if self.audiences:
+            body["audiences"] = [v for v in self.audiences]
+        if self.issuer is not None:
+            body["issuer"] = self.issuer
+        if self.jwks_json is not None:
+            body["jwks_json"] = self.jwks_json
+        if self.subject is not None:
+            body["subject"] = self.subject
+        if self.subject_claim is not None:
+            body["subject_claim"] = self.subject_claim
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.audiences: body['audiences'] = self.audiences
-        if self.issuer is not None: body['issuer'] = self.issuer
-        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
-        if self.subject is not None: body['subject'] = self.subject
-        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        if self.audiences:
+            body["audiences"] = self.audiences
+        if self.issuer is not None:
+            body["issuer"] = self.issuer
+        if self.jwks_json is not None:
+            body["jwks_json"] = self.jwks_json
+        if self.subject is not None:
+            body["subject"] = self.subject
+        if self.subject_claim is not None:
+            body["subject_claim"] = self.subject_claim
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OidcFederationPolicy:
         """Deserializes the OidcFederationPolicy from a dictionary."""
-        return cls(audiences=d.get('audiences', None),
-                   issuer=d.get('issuer', None),
-                   jwks_json=d.get('jwks_json', None),
-                   subject=d.get('subject', None),
-                   subject_claim=d.get('subject_claim', None))
+        return cls(
+            audiences=d.get("audiences", None),
+            issuer=d.get("issuer", None),
+            jwks_json=d.get("jwks_json", None),
+            subject=d.get("subject", None),
+            subject_claim=d.get("subject_claim", None),
+        )
 
 
 @dataclass
@@ -685,39 +821,53 @@ class PublishedAppOutput:
     def as_dict(self) -> dict:
         """Serializes the PublishedAppOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.description is not None: body['description'] = self.description
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.description is not None:
+            body["description"] = self.description
         if self.is_confidential_client is not None:
-            body['is_confidential_client'] = self.is_confidential_client
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
+            body["is_confidential_client"] = self.is_confidential_client
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = [v for v in self.redirect_urls]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.app_id is not None: body['app_id'] = self.app_id
-        if self.client_id is not None: body['client_id'] = self.client_id
-        if self.description is not None: body['description'] = self.description
+        if self.app_id is not None:
+            body["app_id"] = self.app_id
+        if self.client_id is not None:
+            body["client_id"] = self.client_id
+        if self.description is not None:
+            body["description"] = self.description
         if self.is_confidential_client is not None:
-            body['is_confidential_client'] = self.is_confidential_client
-        if self.name is not None: body['name'] = self.name
-        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
-        if self.scopes: body['scopes'] = self.scopes
+            body["is_confidential_client"] = self.is_confidential_client
+        if self.name is not None:
+            body["name"] = self.name
+        if self.redirect_urls:
+            body["redirect_urls"] = self.redirect_urls
+        if self.scopes:
+            body["scopes"] = self.scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishedAppOutput:
         """Deserializes the PublishedAppOutput from a dictionary."""
-        return cls(app_id=d.get('app_id', None),
-                   client_id=d.get('client_id', None),
-                   description=d.get('description', None),
-                   is_confidential_client=d.get('is_confidential_client', None),
-                   name=d.get('name', None),
-                   redirect_urls=d.get('redirect_urls', None),
-                   scopes=d.get('scopes', None))
+        return cls(
+            app_id=d.get("app_id", None),
+            client_id=d.get("client_id", None),
+            description=d.get("description", None),
+            is_confidential_client=d.get("is_confidential_client", None),
+            name=d.get("name", None),
+            redirect_urls=d.get("redirect_urls", None),
+            scopes=d.get("scopes", None),
+        )
 
 
 @dataclass
@@ -740,31 +890,43 @@ class SecretInfo:
     def as_dict(self) -> dict:
         """Serializes the SecretInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SecretInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretInfo:
         """Deserializes the SecretInfo from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   id=d.get('id', None),
-                   secret_hash=d.get('secret_hash', None),
-                   status=d.get('status', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            id=d.get("id", None),
+            secret_hash=d.get("secret_hash", None),
+            status=d.get("status", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -779,25 +941,27 @@ def as_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_token_ttl_in_minutes is not None:
-            body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
+            body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
         if self.refresh_token_ttl_in_minutes is not None:
-            body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+            body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.access_token_ttl_in_minutes is not None:
-            body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
+            body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
         if self.refresh_token_ttl_in_minutes is not None:
-            body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+            body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy:
         """Deserializes the TokenAccessPolicy from a dictionary."""
-        return cls(access_token_ttl_in_minutes=d.get('access_token_ttl_in_minutes', None),
-                   refresh_token_ttl_in_minutes=d.get('refresh_token_ttl_in_minutes', None))
+        return cls(
+            access_token_ttl_in_minutes=d.get("access_token_ttl_in_minutes", None),
+            refresh_token_ttl_in_minutes=d.get("refresh_token_ttl_in_minutes", None),
+        )
 
 
 @dataclass
@@ -821,32 +985,43 @@ class UpdateCustomAppIntegration:
     def as_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.redirect_urls:
+            body["redirect_urls"] = [v for v in self.redirect_urls]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         if self.user_authorized_scopes:
-            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+            body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
-        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.redirect_urls:
+            body["redirect_urls"] = self.redirect_urls
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
+        if self.user_authorized_scopes:
+            body["user_authorized_scopes"] = self.user_authorized_scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration:
         """Deserializes the UpdateCustomAppIntegration from a dictionary."""
-        return cls(integration_id=d.get('integration_id', None),
-                   redirect_urls=d.get('redirect_urls', None),
-                   scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
-                   user_authorized_scopes=d.get('user_authorized_scopes', None))
+        return cls(
+            integration_id=d.get("integration_id", None),
+            redirect_urls=d.get("redirect_urls", None),
+            scopes=d.get("scopes", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+            user_authorized_scopes=d.get("user_authorized_scopes", None),
+        )
 
 
 @dataclass
@@ -878,22 +1053,28 @@ class UpdatePublishedAppIntegration:
     def as_dict(self) -> dict:
         """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegration:
         """Deserializes the UpdatePublishedAppIntegration from a dictionary."""
-        return cls(integration_id=d.get('integration_id', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+        return cls(
+            integration_id=d.get("integration_id", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+        )
 
 
 @dataclass
@@ -917,20 +1098,20 @@ def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput:
 
 class AccountFederationPolicyAPI:
     """These APIs manage account federation policies.
-    
+
     Account federation policies allow users and service principals in your Databricks account to securely
     access Databricks APIs using tokens from your trusted identity providers (IdPs).
-    
+
     With token federation, your users and service principals can exchange tokens from your IdP for Databricks
     OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
     Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
     Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
     synchronized into your Databricks account.
-    
+
     Token federation is configured in your Databricks account using an account federation policy. An account
     federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
     how to determine which Databricks user, or subject, a token is issued for
-    
+
     To configure a federation policy, you provide the following: * The required token __issuer__, as specified
     in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
     token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
@@ -941,117 +1122,143 @@ class AccountFederationPolicyAPI:
     public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
     Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
     strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
-    
+
     An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
     subject_claim: "sub" ```
-    
+
     An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
     `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
     "username@mycompany.com" } ```
-    
+
     You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
     your users do not already have the ability to generate tokens that are compatible with your federation
     policy.
-    
+
     You do not need to configure an OAuth application in Databricks to use token federation.
-    
+
     [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html"""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               policy: Optional[FederationPolicy] = None,
-               policy_id: Optional[str] = None) -> FederationPolicy:
+    def create(
+        self,
+        *,
+        policy: Optional[FederationPolicy] = None,
+        policy_id: Optional[str] = None,
+    ) -> FederationPolicy:
         """Create account federation policy.
-        
+
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
           The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
           characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-        
+
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
         query = {}
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
-                           query=query,
-                           body=body,
-                           headers=headers)
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/federationPolicies",
+            query=query,
+            body=body,
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
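
As a usage note for the method above: a minimal sketch of registering the account federation policy from the class docstring. It assumes the account client exposes this service as `a.federation_policy` and that `OidcFederationPolicy` carries `issuer`, `audiences`, and `subject_claim` fields matching the example policy; exact attribute and field names may differ between SDK releases.

from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()  # host, account_id and credentials are read from the environment
created = a.federation_policy.create(
    policy=FederationPolicy(
        oidc_policy=OidcFederationPolicy(
            issuer="https://idp.mycompany.com/oidc",
            audiences=["databricks"],
            subject_claim="sub",
        ),
        description="Corporate IdP",
    ),
    policy_id="corporate-idp",  # optional; Databricks assigns an id if omitted
)
print(created.as_dict())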
 
     def delete(self, policy_id: str):
         """Delete account federation policy.
-        
+
         :param policy_id: str
           The identifier for the federation policy.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}",
+            headers=headers,
+        )
 
     def get(self, policy_id: str) -> FederationPolicy:
         """Get account federation policy.
-        
+
         :param policy_id: str
           The identifier for the federation policy.
-        
+
         :returns: :class:`FederationPolicy`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}",
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[FederationPolicy]:
         """List account federation policies.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`FederationPolicy`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
-                                query=query,
-                                headers=headers)
-            if 'policies' in json:
-                for v in json['policies']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/federationPolicies",
+                query=query,
+                headers=headers,
+            )
+            if "policies" in json:
+                for v in json["policies"]:
                     yield FederationPolicy.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               policy_id: str,
-               *,
-               policy: Optional[FederationPolicy] = None,
-               update_mask: Optional[str] = None) -> FederationPolicy:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        policy_id: str,
+        *,
+        policy: Optional[FederationPolicy] = None,
+        update_mask: Optional[str] = None,
+    ) -> FederationPolicy:
         """Update account federation policy.
-        
+
         :param policy_id: str
           The identifier for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
@@ -1061,43 +1268,52 @@ def update(self,
           should be updated (full replacement). If unspecified, all fields that are set in the policy provided
           in the update request will overwrite the corresponding fields in the existing policy. Example value:
           'description,oidc_policy.audiences'.
-        
+
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
         query = {}
-        if update_mask is not None: query['update_mask'] = update_mask
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
-                           query=query,
-                           body=body,
-                           headers=headers)
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}",
+            query=query,
+            body=body,
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
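
A sketch of a partial update using the field mask semantics described above: only the fields named in `update_mask` are replaced, everything else is left untouched. Same attribute-name assumptions as the previous sketch.

from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy

a = AccountClient()
updated = a.federation_policy.update(
    policy_id="corporate-idp",
    policy=FederationPolicy(description="Primary corporate IdP"),
    update_mask="description",  # only this field is replaced; oidc_policy keeps its current value
)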
 
 
 class CustomAppIntegrationAPI:
     """These APIs enable administrators to manage custom OAuth app integrations, which is required for
-    adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
+    adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               confidential: Optional[bool] = None,
-               name: Optional[str] = None,
-               redirect_urls: Optional[List[str]] = None,
-               scopes: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None,
-               user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput:
+    def create(
+        self,
+        *,
+        confidential: Optional[bool] = None,
+        name: Optional[str] = None,
+        redirect_urls: Optional[List[str]] = None,
+        scopes: Optional[List[str]] = None,
+        token_access_policy: Optional[TokenAccessPolicy] = None,
+        user_authorized_scopes: Optional[List[str]] = None,
+    ) -> CreateCustomAppIntegrationOutput:
         """Create Custom OAuth App Integration.
-        
+
         Create Custom OAuth App Integration.
-        
+
         You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
-        
+
         :param confidential: bool (optional)
           This field indicates whether an OAuth client secret is required to authenticate this client.
         :param name: str (optional)
@@ -1112,108 +1328,135 @@ def create(self,
         :param user_authorized_scopes: List[str] (optional)
           Scopes that will need to be consented to by the end user to mint the access token. If the user
           does not authorize, the access token will not be minted. Must be a subset of scopes.
-        
+
         :returns: :class:`CreateCustomAppIntegrationOutput`
         """
         body = {}
-        if confidential is not None: body['confidential'] = confidential
-        if name is not None: body['name'] = name
-        if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
-        if scopes is not None: body['scopes'] = [v for v in scopes]
-        if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if confidential is not None:
+            body["confidential"] = confidential
+        if name is not None:
+            body["name"] = name
+        if redirect_urls is not None:
+            body["redirect_urls"] = [v for v in redirect_urls]
+        if scopes is not None:
+            body["scopes"] = [v for v in scopes]
+        if token_access_policy is not None:
+            body["token_access_policy"] = token_access_policy.as_dict()
         if user_authorized_scopes is not None:
-            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["user_authorized_scopes"] = [v for v in user_authorized_scopes]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations",
+            body=body,
+            headers=headers,
+        )
         return CreateCustomAppIntegrationOutput.from_dict(res)
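
A sketch of creating a confidential custom OAuth app integration with the parameters documented above. The `a.custom_app_integration` attribute, the `access_token_ttl_in_minutes` field on `TokenAccessPolicy`, and the printed output fields are assumptions based on the SDK's conventions.

from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import TokenAccessPolicy

a = AccountClient()
created = a.custom_app_integration.create(
    name="my-bi-tool",
    confidential=True,  # a client secret is generated and only returned in this create response
    redirect_urls=["https://my-bi-tool.example.com/oauth/callback"],
    scopes=["all-apis"],
    token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=60),
)
print(created.integration_id, created.client_id)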
 
     def delete(self, integration_id: str):
         """Delete Custom OAuth App Integration.
-        
+
         Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
         :method:CustomAppIntegration/get.
-        
+
         :param integration_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}",
+            headers=headers,
+        )
 
     def get(self, integration_id: str) -> GetCustomAppIntegrationOutput:
         """Get OAuth Custom App Integration.
-        
+
         Gets the Custom OAuth App Integration for the given integration id.
-        
+
         :param integration_id: str
           The OAuth app integration ID.
-        
+
         :returns: :class:`GetCustomAppIntegrationOutput`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}",
+            headers=headers,
+        )
         return GetCustomAppIntegrationOutput.from_dict(res)
 
-    def list(self,
-             *,
-             include_creator_username: Optional[bool] = None,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]:
+    def list(
+        self,
+        *,
+        include_creator_username: Optional[bool] = None,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[GetCustomAppIntegrationOutput]:
         """Get custom oauth app integrations.
-        
+
         Get the list of custom OAuth app integrations for the specified Databricks account
-        
+
         :param include_creator_username: bool (optional)
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         """
 
         query = {}
-        if include_creator_username is not None: query['include_creator_username'] = include_creator_username
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if include_creator_username is not None:
+            query["include_creator_username"] = include_creator_username
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations',
-                                query=query,
-                                headers=headers)
-            if 'apps' in json:
-                for v in json['apps']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations",
+                query=query,
+                headers=headers,
+            )
+            if "apps" in json:
+                for v in json["apps"]:
                     yield GetCustomAppIntegrationOutput.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               integration_id: str,
-               *,
-               redirect_urls: Optional[List[str]] = None,
-               scopes: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None,
-               user_authorized_scopes: Optional[List[str]] = None):
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        integration_id: str,
+        *,
+        redirect_urls: Optional[List[str]] = None,
+        scopes: Optional[List[str]] = None,
+        token_access_policy: Optional[TokenAccessPolicy] = None,
+        user_authorized_scopes: Optional[List[str]] = None,
+    ):
         """Updates Custom OAuth App Integration.
-        
+
         Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
         via :method:CustomAppIntegration/get.
-        
+
         :param integration_id: str
         :param redirect_urls: List[str] (optional)
           List of OAuth redirect urls to be updated in the custom OAuth app integration
@@ -1225,22 +1468,29 @@ def update(self,
         :param user_authorized_scopes: List[str] (optional)
           Scopes that will need to be consented to by the end user to mint the access token. If the user
           does not authorize, the access token will not be minted. Must be a subset of scopes.
-        
-        
+
+
         """
         body = {}
-        if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
-        if scopes is not None: body['scopes'] = [v for v in scopes]
-        if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if redirect_urls is not None:
+            body["redirect_urls"] = [v for v in redirect_urls]
+        if scopes is not None:
+            body["scopes"] = [v for v in scopes]
+        if token_access_policy is not None:
+            body["token_access_policy"] = token_access_policy.as_dict()
         if user_authorized_scopes is not None:
-            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["user_authorized_scopes"] = [v for v in user_authorized_scopes]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
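
A sketch of updating only the redirect URLs of an existing integration; as the code above shows, only the fields you pass are included in the PATCH body. The integration id is a placeholder.

from databricks.sdk import AccountClient

a = AccountClient()
a.custom_app_integration.update(
    integration_id="<integration-id>",
    redirect_urls=[
        "https://my-bi-tool.example.com/oauth/callback",
        "http://localhost:8080/callback",  # e.g. add a local development callback
    ],
)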
 
 
 class OAuthPublishedAppsAPI:
@@ -1251,184 +1501,225 @@ class OAuthPublishedAppsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[PublishedAppOutput]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PublishedAppOutput]:
         """Get all the published OAuth apps.
-        
+
         Get all the available published OAuth apps in Databricks.
-        
+
         :param page_size: int (optional)
           The max number of OAuth published apps to return in one page.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
-        
+
         :returns: Iterator over :class:`PublishedAppOutput`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps',
-                                query=query,
-                                headers=headers)
-            if 'apps' in json:
-                for v in json['apps']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps",
+                query=query,
+                headers=headers,
+            )
+            if "apps" in json:
+                for v in json["apps"]:
                     yield PublishedAppOutput.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
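
Because the iterator above follows `next_page_token` internally, scanning the published-app catalog is a plain for loop. The `o_auth_published_apps` attribute name and the `PublishedAppOutput` fields printed here are assumptions.

from databricks.sdk import AccountClient

a = AccountClient()
for app in a.o_auth_published_apps.list(page_size=100):
    print(app.app_id, app.name)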
 
 
 class PublishedAppIntegrationAPI:
     """These APIs enable administrators to manage published OAuth app integrations, which is required for
-    adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
+    adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(
-            self,
-            *,
-            app_id: Optional[str] = None,
-            token_access_policy: Optional[TokenAccessPolicy] = None) -> CreatePublishedAppIntegrationOutput:
+        self,
+        *,
+        app_id: Optional[str] = None,
+        token_access_policy: Optional[TokenAccessPolicy] = None,
+    ) -> CreatePublishedAppIntegrationOutput:
         """Create Published OAuth App Integration.
-        
+
         Create Published OAuth App Integration.
-        
+
         You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
-        
+
         :param app_id: str (optional)
           App id of the OAuth published app integration. For example power-bi, tableau-desktop.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
-        
+
         :returns: :class:`CreatePublishedAppIntegrationOutput`
         """
         body = {}
-        if app_id is not None: body['app_id'] = app_id
-        if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations',
-                           body=body,
-                           headers=headers)
+        if app_id is not None:
+            body["app_id"] = app_id
+        if token_access_policy is not None:
+            body["token_access_policy"] = token_access_policy.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations",
+            body=body,
+            headers=headers,
+        )
         return CreatePublishedAppIntegrationOutput.from_dict(res)
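
A sketch of enabling a published app for the account, using one of the example `app_id` values from the docstring above; the `a.published_app_integration` attribute and the `integration_id` output field are assumptions.

from databricks.sdk import AccountClient

a = AccountClient()
created = a.published_app_integration.create(app_id="tableau-desktop")
print(created.integration_id)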
 
     def delete(self, integration_id: str):
         """Delete Published OAuth App Integration.
-        
+
         Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
-        
+
         :param integration_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}",
+            headers=headers,
+        )
 
     def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput:
         """Get OAuth Published App Integration.
-        
+
         Gets the Published OAuth App Integration for the given integration id.
-        
+
         :param integration_id: str
-        
+
         :returns: :class:`GetPublishedAppIntegrationOutput`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}",
+            headers=headers,
+        )
         return GetPublishedAppIntegrationOutput.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[GetPublishedAppIntegrationOutput]:
         """Get published oauth app integrations.
-        
+
         Get the list of published OAuth app integrations for the specified Databricks account
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations',
-                                query=query,
-                                headers=headers)
-            if 'apps' in json:
-                for v in json['apps']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations",
+                query=query,
+                headers=headers,
+            )
+            if "apps" in json:
+                for v in json["apps"]:
                     yield GetPublishedAppIntegrationOutput.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None):
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        integration_id: str,
+        *,
+        token_access_policy: Optional[TokenAccessPolicy] = None,
+    ):
         """Updates Published OAuth App Integration.
-        
+
         Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
         integration via :method:PublishedAppIntegration/get.
-        
+
         :param integration_id: str
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the published OAuth app integration
-        
-        
+
+
         """
         body = {}
-        if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if token_access_policy is not None:
+            body["token_access_policy"] = token_access_policy.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
 
 
 class ServicePrincipalFederationPolicyAPI:
     """These APIs manage service principal federation policies.
-    
+
     Service principal federation, also known as Workload Identity Federation, allows your automated workloads
     running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
     With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
     Databricks service principal, using tokens provided by the workload runtime.
-    
+
     Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
     automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
     possible. Workload Identity Federation is supported by many popular services, including Github Actions,
     Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
-    
+
     Workload identity federation is configured in your Databricks account using a service principal federation
     policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
     allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
     Databricks service principal
-    
+
     To configure a federation policy, you provide the following: * The required token __issuer__, as specified
     in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
     workload identity provider. * The required token __subject__, as specified in the “sub” claim of
@@ -1440,129 +1731,152 @@ class ServicePrincipalFederationPolicyAPI:
     of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
     fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
     the issuer’s well known endpoint for discovering public keys.
-    
+
     An example service principal federation policy, for a Github Actions workload, is: ``` issuer:
     "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
     "repo:my-github-org/my-repo:environment:prod" ```
-    
+
     An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
     { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
     "repo:my-github-org/my-repo:environment:prod" } ```
-    
+
     You may also need to configure the workload runtime to generate tokens for your workloads.
-    
-    You do not need to configure an OAuth application in Databricks to use token federation."""
+
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               service_principal_id: int,
-               *,
-               policy: Optional[FederationPolicy] = None,
-               policy_id: Optional[str] = None) -> FederationPolicy:
+    def create(
+        self,
+        service_principal_id: int,
+        *,
+        policy: Optional[FederationPolicy] = None,
+        policy_id: Optional[str] = None,
+    ) -> FederationPolicy:
         """Create service principal federation policy.
-        
+
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
           The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
           characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-        
+
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
         query = {}
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies",
             query=query,
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
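
A sketch mirroring the GitHub Actions example in the class docstring: it lets workloads from that repository authenticate as service principal 1234. The `a.service_principal_federation_policy` attribute and the `OidcFederationPolicy` field names are assumptions, and the service principal id is a placeholder.

from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()
policy = a.service_principal_federation_policy.create(
    service_principal_id=1234,  # numeric id of the target service principal
    policy=FederationPolicy(
        oidc_policy=OidcFederationPolicy(
            issuer="https://token.actions.githubusercontent.com",
            audiences=["https://github.com/my-github-org"],
            subject="repo:my-github-org/my-repo:environment:prod",
        )
    ),
)
print(policy.as_dict())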
 
     def delete(self, service_principal_id: int, policy_id: str):
         """Delete service principal federation policy.
-        
+
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
           The identifier for the federation policy.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}",
+            headers=headers,
+        )
 
     def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy:
         """Get service principal federation policy.
-        
+
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
           The identifier for the federation policy.
-        
+
         :returns: :class:`FederationPolicy`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}",
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
 
-    def list(self,
-             service_principal_id: int,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+    def list(
+        self,
+        service_principal_id: int,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[FederationPolicy]:
         """List service principal federation policies.
-        
+
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`FederationPolicy`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
             json = self._api.do(
-                'GET',
-                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies",
                 query=query,
-                headers=headers)
-            if 'policies' in json:
-                for v in json['policies']:
+                headers=headers,
+            )
+            if "policies" in json:
+                for v in json["policies"]:
                     yield FederationPolicy.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               service_principal_id: int,
-               policy_id: str,
-               *,
-               policy: Optional[FederationPolicy] = None,
-               update_mask: Optional[str] = None) -> FederationPolicy:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        service_principal_id: int,
+        policy_id: str,
+        *,
+        policy: Optional[FederationPolicy] = None,
+        update_mask: Optional[str] = None,
+    ) -> FederationPolicy:
         """Update service principal federation policy.
-        
+
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
@@ -1574,84 +1888,94 @@ def update(self,
           should be updated (full replacement). If unspecified, all fields that are set in the policy provided
           in the update request will overwrite the corresponding fields in the existing policy. Example value:
           'description,oidc_policy.audiences'.
-        
+
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
         query = {}
-        if update_mask is not None: query['update_mask'] = update_mask
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}",
             query=query,
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return FederationPolicy.from_dict(res)
 
 
 class ServicePrincipalSecretsAPI:
     """These APIs enable administrators to manage service principal secrets.
-    
+
     You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be
     used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using
     OAuth tokens for service principals].
-    
+
     In addition, the generated secrets can be used to configure the Databricks Terraform Provider to
     authenticate with the service principal. For more information, see [Databricks Terraform Provider].
-    
+
     [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
-    [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal"""
+    [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(self, service_principal_id: int) -> CreateServicePrincipalSecretResponse:
         """Create service principal secret.
-        
+
         Create a secret for the given service principal.
-        
+
         :param service_principal_id: int
           The service principal ID.
-        
+
         :returns: :class:`CreateServicePrincipalSecretResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
-            headers=headers)
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets",
+            headers=headers,
+        )
         return CreateServicePrincipalSecretResponse.from_dict(res)
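
A sketch of minting a secret for a service principal. The secret value appears only in this create response, so it should be captured immediately; the `a.service_principal_secrets` attribute name is an assumption and the id is a placeholder.

from databricks.sdk import AccountClient

a = AccountClient()
secret = a.service_principal_secrets.create(service_principal_id=1234)
# Store the returned secret securely right away; it cannot be retrieved again later.
print(secret.as_dict())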
 
     def delete(self, service_principal_id: int, secret_id: str):
         """Delete service principal secret.
-        
+
         Delete a secret from the given service principal.
-        
+
         :param service_principal_id: int
           The service principal ID.
         :param secret_id: str
           The secret ID.
-        
-        
+
+
         """
 
         headers = {}
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}",
+            headers=headers,
+        )
 
     def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]:
         """List service principal secrets.
-        
+
         List all secrets associated with the given service principal. This operation only returns information
         about the secrets themselves and does not include the secret values.
-        
+
         :param service_principal_id: int
           The service principal ID.
         :param page_token: str (optional)
@@ -1661,23 +1985,27 @@ def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -
           previous request. To list all of the secrets for a service principal, it is necessary to continue
           requesting pages of entries until the response contains no `next_page_token`. Note that the number
           of entries returned must not be used to determine when the listing is complete.
-        
+
         :returns: Iterator over :class:`SecretInfo`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
             json = self._api.do(
-                'GET',
-                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets",
                 query=query,
-                headers=headers)
-            if 'secrets' in json:
-                for v in json['secrets']:
+                headers=headers,
+            )
+            if "secrets" in json:
+                for v in json["secrets"]:
                     yield SecretInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
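
Because the generator above keeps requesting pages until the response carries no `next_page_token`, callers never touch `page_token` themselves; a consumption sketch (same attribute-name assumption as before):

from databricks.sdk import AccountClient

a = AccountClient()
# Iterates over every SecretInfo across all pages for the given service principal.
for info in a.service_principal_secrets.list(service_principal_id=1234):
    print(info.as_dict())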
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index db5d698d6..7d84de54c 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import compute
 
@@ -114,94 +114,148 @@ class CreatePipeline:
     def as_dict(self) -> dict:
         """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.development is not None: body['development'] = self.development
-        if self.dry_run is not None: body['dry_run'] = self.dry_run
-        if self.edition is not None: body['edition'] = self.edition
-        if self.filters: body['filters'] = self.filters.as_dict()
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
-        if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
+        if self.allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = self.allow_duplicate_names
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.development is not None:
+            body["development"] = self.development
+        if self.dry_run is not None:
+            body["dry_run"] = self.dry_run
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.filters:
+            body["filters"] = self.filters.as_dict()
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition.as_dict()
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.restart_window:
+            body["restart_window"] = self.restart_window.as_dict()
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = self.clusters
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.development is not None: body['development'] = self.development
-        if self.dry_run is not None: body['dry_run'] = self.dry_run
-        if self.edition is not None: body['edition'] = self.edition
-        if self.filters: body['filters'] = self.filters
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
-        if self.libraries: body['libraries'] = self.libraries
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = self.notifications
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window
-        if self.run_as: body['run_as'] = self.run_as
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger
+        if self.allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = self.allow_duplicate_names
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.development is not None:
+            body["development"] = self.development
+        if self.dry_run is not None:
+            body["dry_run"] = self.dry_run
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.filters:
+            body["filters"] = self.filters
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.restart_window:
+            body["restart_window"] = self.restart_window
+        if self.run_as:
+            body["run_as"] = self.run_as
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
-        return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
-                   budget_policy_id=d.get('budget_policy_id', None),
-                   catalog=d.get('catalog', None),
-                   channel=d.get('channel', None),
-                   clusters=_repeated_dict(d, 'clusters', PipelineCluster),
-                   configuration=d.get('configuration', None),
-                   continuous=d.get('continuous', None),
-                   deployment=_from_dict(d, 'deployment', PipelineDeployment),
-                   development=d.get('development', None),
-                   dry_run=d.get('dry_run', None),
-                   edition=d.get('edition', None),
-                   filters=_from_dict(d, 'filters', Filters),
-                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
-                   id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
-                   libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
-                   name=d.get('name', None),
-                   notifications=_repeated_dict(d, 'notifications', Notifications),
-                   photon=d.get('photon', None),
-                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
-                   run_as=_from_dict(d, 'run_as', RunAs),
-                   schema=d.get('schema', None),
-                   serverless=d.get('serverless', None),
-                   storage=d.get('storage', None),
-                   target=d.get('target', None),
-                   trigger=_from_dict(d, 'trigger', PipelineTrigger))
+        return cls(
+            allow_duplicate_names=d.get("allow_duplicate_names", None),
+            budget_policy_id=d.get("budget_policy_id", None),
+            catalog=d.get("catalog", None),
+            channel=d.get("channel", None),
+            clusters=_repeated_dict(d, "clusters", PipelineCluster),
+            configuration=d.get("configuration", None),
+            continuous=d.get("continuous", None),
+            deployment=_from_dict(d, "deployment", PipelineDeployment),
+            development=d.get("development", None),
+            dry_run=d.get("dry_run", None),
+            edition=d.get("edition", None),
+            filters=_from_dict(d, "filters", Filters),
+            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
+            id=d.get("id", None),
+            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
+            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
+            name=d.get("name", None),
+            notifications=_repeated_dict(d, "notifications", Notifications),
+            photon=d.get("photon", None),
+            restart_window=_from_dict(d, "restart_window", RestartWindow),
+            run_as=_from_dict(d, "run_as", RunAs),
+            schema=d.get("schema", None),
+            serverless=d.get("serverless", None),
+            storage=d.get("storage", None),
+            target=d.get("target", None),
+            trigger=_from_dict(d, "trigger", PipelineTrigger),
+        )
 
 
 @dataclass
@@ -215,22 +269,28 @@ class CreatePipelineResponse:
     def as_dict(self) -> dict:
         """Serializes the CreatePipelineResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.effective_settings: body['effective_settings'] = self.effective_settings.as_dict()
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.effective_settings:
+            body["effective_settings"] = self.effective_settings.as_dict()
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.effective_settings: body['effective_settings'] = self.effective_settings
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.effective_settings:
+            body["effective_settings"] = self.effective_settings
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipelineResponse:
         """Deserializes the CreatePipelineResponse from a dictionary."""
-        return cls(effective_settings=_from_dict(d, 'effective_settings', PipelineSpec),
-                   pipeline_id=d.get('pipeline_id', None))
+        return cls(
+            effective_settings=_from_dict(d, "effective_settings", PipelineSpec),
+            pipeline_id=d.get("pipeline_id", None),
+        )
 
 
 @dataclass
@@ -242,22 +302,28 @@ class CronTrigger:
     def as_dict(self) -> dict:
         """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        if self.quartz_cron_schedule is not None:
+            body["quartz_cron_schedule"] = self.quartz_cron_schedule
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        if self.quartz_cron_schedule is not None:
+            body["quartz_cron_schedule"] = self.quartz_cron_schedule
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronTrigger:
         """Deserializes the CronTrigger from a dictionary."""
-        return cls(quartz_cron_schedule=d.get('quartz_cron_schedule', None),
-                   timezone_id=d.get('timezone_id', None))
+        return cls(
+            quartz_cron_schedule=d.get("quartz_cron_schedule", None),
+            timezone_id=d.get("timezone_id", None),
+        )
 
 
 @dataclass
@@ -271,34 +337,38 @@ class DataPlaneId:
     def as_dict(self) -> dict:
         """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance is not None: body['instance'] = self.instance
-        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance is not None: body['instance'] = self.instance
-        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneId:
         """Deserializes the DataPlaneId from a dictionary."""
-        return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None))
+        return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
 
 
 class DayOfWeek(Enum):
     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
     start_hour). If not specified all days of the week will be used."""
 
-    FRIDAY = 'FRIDAY'
-    MONDAY = 'MONDAY'
-    SATURDAY = 'SATURDAY'
-    SUNDAY = 'SUNDAY'
-    THURSDAY = 'THURSDAY'
-    TUESDAY = 'TUESDAY'
-    WEDNESDAY = 'WEDNESDAY'
+    FRIDAY = "FRIDAY"
+    MONDAY = "MONDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+    THURSDAY = "THURSDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
 
 
 @dataclass
@@ -324,7 +394,7 @@ class DeploymentKind(Enum):
     """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
     Databricks Asset Bundle."""
 
-    BUNDLE = 'BUNDLE'
+    BUNDLE = "BUNDLE"
 
 
 @dataclass
@@ -426,99 +496,153 @@ class EditPipeline:
     def as_dict(self) -> dict:
         """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
+        if self.allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = self.allow_duplicate_names
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
         if self.expected_last_modified is not None:
-            body['expected_last_modified'] = self.expected_last_modified
-        if self.filters: body['filters'] = self.filters.as_dict()
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None: body['photon'] = self.photon
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
-        if self.run_as: body['run_as'] = self.run_as.as_dict()
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
+            body["expected_last_modified"] = self.expected_last_modified
+        if self.filters:
+            body["filters"] = self.filters.as_dict()
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition.as_dict()
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.restart_window:
+            body["restart_window"] = self.restart_window.as_dict()
+        if self.run_as:
+            body["run_as"] = self.run_as.as_dict()
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = self.clusters
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
+        if self.allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = self.allow_duplicate_names
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
         if self.expected_last_modified is not None:
-            body['expected_last_modified'] = self.expected_last_modified
-        if self.filters: body['filters'] = self.filters
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
-        if self.libraries: body['libraries'] = self.libraries
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = self.notifications
-        if self.photon is not None: body['photon'] = self.photon
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.restart_window: body['restart_window'] = self.restart_window
-        if self.run_as: body['run_as'] = self.run_as
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger
+            body["expected_last_modified"] = self.expected_last_modified
+        if self.filters:
+            body["filters"] = self.filters
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.restart_window:
+            body["restart_window"] = self.restart_window
+        if self.run_as:
+            body["run_as"] = self.run_as
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
-        return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
-                   budget_policy_id=d.get('budget_policy_id', None),
-                   catalog=d.get('catalog', None),
-                   channel=d.get('channel', None),
-                   clusters=_repeated_dict(d, 'clusters', PipelineCluster),
-                   configuration=d.get('configuration', None),
-                   continuous=d.get('continuous', None),
-                   deployment=_from_dict(d, 'deployment', PipelineDeployment),
-                   development=d.get('development', None),
-                   edition=d.get('edition', None),
-                   expected_last_modified=d.get('expected_last_modified', None),
-                   filters=_from_dict(d, 'filters', Filters),
-                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
-                   id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
-                   libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
-                   name=d.get('name', None),
-                   notifications=_repeated_dict(d, 'notifications', Notifications),
-                   photon=d.get('photon', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
-                   run_as=_from_dict(d, 'run_as', RunAs),
-                   schema=d.get('schema', None),
-                   serverless=d.get('serverless', None),
-                   storage=d.get('storage', None),
-                   target=d.get('target', None),
-                   trigger=_from_dict(d, 'trigger', PipelineTrigger))
+        return cls(
+            allow_duplicate_names=d.get("allow_duplicate_names", None),
+            budget_policy_id=d.get("budget_policy_id", None),
+            catalog=d.get("catalog", None),
+            channel=d.get("channel", None),
+            clusters=_repeated_dict(d, "clusters", PipelineCluster),
+            configuration=d.get("configuration", None),
+            continuous=d.get("continuous", None),
+            deployment=_from_dict(d, "deployment", PipelineDeployment),
+            development=d.get("development", None),
+            edition=d.get("edition", None),
+            expected_last_modified=d.get("expected_last_modified", None),
+            filters=_from_dict(d, "filters", Filters),
+            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
+            id=d.get("id", None),
+            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
+            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
+            name=d.get("name", None),
+            notifications=_repeated_dict(d, "notifications", Notifications),
+            photon=d.get("photon", None),
+            pipeline_id=d.get("pipeline_id", None),
+            restart_window=_from_dict(d, "restart_window", RestartWindow),
+            run_as=_from_dict(d, "run_as", RunAs),
+            schema=d.get("schema", None),
+            serverless=d.get("serverless", None),
+            storage=d.get("storage", None),
+            target=d.get("target", None),
+            trigger=_from_dict(d, "trigger", PipelineTrigger),
+        )
 
 
 @dataclass
@@ -551,31 +675,37 @@ class ErrorDetail:
     def as_dict(self) -> dict:
         """Serializes the ErrorDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exceptions: body['exceptions'] = [v.as_dict() for v in self.exceptions]
-        if self.fatal is not None: body['fatal'] = self.fatal
+        if self.exceptions:
+            body["exceptions"] = [v.as_dict() for v in self.exceptions]
+        if self.fatal is not None:
+            body["fatal"] = self.fatal
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exceptions: body['exceptions'] = self.exceptions
-        if self.fatal is not None: body['fatal'] = self.fatal
+        if self.exceptions:
+            body["exceptions"] = self.exceptions
+        if self.fatal is not None:
+            body["fatal"] = self.fatal
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ErrorDetail:
         """Deserializes the ErrorDetail from a dictionary."""
-        return cls(exceptions=_repeated_dict(d, 'exceptions', SerializedException),
-                   fatal=d.get('fatal', None))
+        return cls(
+            exceptions=_repeated_dict(d, "exceptions", SerializedException),
+            fatal=d.get("fatal", None),
+        )
 
 
 class EventLevel(Enum):
     """The severity level of the event."""
 
-    ERROR = 'ERROR'
-    INFO = 'INFO'
-    METRICS = 'METRICS'
-    WARN = 'WARN'
+    ERROR = "ERROR"
+    INFO = "INFO"
+    METRICS = "METRICS"
+    WARN = "WARN"
 
 
 @dataclass
@@ -586,19 +716,21 @@ class FileLibrary:
     def as_dict(self) -> dict:
         """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FileLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileLibrary:
         """Deserializes the FileLibrary from a dictionary."""
-        return cls(path=d.get('path', None))
+        return cls(path=d.get("path", None))
 
 
 @dataclass
@@ -612,21 +744,25 @@ class Filters:
     def as_dict(self) -> dict:
         """Serializes the Filters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exclude: body['exclude'] = [v for v in self.exclude]
-        if self.include: body['include'] = [v for v in self.include]
+        if self.exclude:
+            body["exclude"] = [v for v in self.exclude]
+        if self.include:
+            body["include"] = [v for v in self.include]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Filters into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exclude: body['exclude'] = self.exclude
-        if self.include: body['include'] = self.include
+        if self.exclude:
+            body["exclude"] = self.exclude
+        if self.include:
+            body["include"] = self.include
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Filters:
         """Deserializes the Filters from a dictionary."""
-        return cls(exclude=d.get('exclude', None), include=d.get('include', None))
+        return cls(exclude=d.get("exclude", None), include=d.get("include", None))
 
 
 @dataclass
@@ -637,19 +773,21 @@ class GetPipelinePermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetPipelinePermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelinePermissionLevelsResponse:
         """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PipelinePermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", PipelinePermissionsDescription))
 
 
 @dataclass
@@ -693,61 +831,85 @@ class GetPipelineResponse:
     def as_dict(self) -> dict:
         """Serializes the GetPipelineResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.health is not None: body['health'] = self.health.value
-        if self.last_modified is not None: body['last_modified'] = self.last_modified
-        if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
-        if self.name is not None: body['name'] = self.name
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.spec: body['spec'] = self.spec.as_dict()
-        if self.state is not None: body['state'] = self.state.value
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.health is not None:
+            body["health"] = self.health.value
+        if self.last_modified is not None:
+            body["last_modified"] = self.last_modified
+        if self.latest_updates:
+            body["latest_updates"] = [v.as_dict() for v in self.latest_updates]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
-            body['effective_budget_policy_id'] = self.effective_budget_policy_id
-        if self.health is not None: body['health'] = self.health
-        if self.last_modified is not None: body['last_modified'] = self.last_modified
-        if self.latest_updates: body['latest_updates'] = self.latest_updates
-        if self.name is not None: body['name'] = self.name
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.spec: body['spec'] = self.spec
-        if self.state is not None: body['state'] = self.state
+            body["effective_budget_policy_id"] = self.effective_budget_policy_id
+        if self.health is not None:
+            body["health"] = self.health
+        if self.last_modified is not None:
+            body["last_modified"] = self.last_modified
+        if self.latest_updates:
+            body["latest_updates"] = self.latest_updates
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.spec:
+            body["spec"] = self.spec
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse:
         """Deserializes the GetPipelineResponse from a dictionary."""
-        return cls(cause=d.get('cause', None),
-                   cluster_id=d.get('cluster_id', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
-                   health=_enum(d, 'health', GetPipelineResponseHealth),
-                   last_modified=d.get('last_modified', None),
-                   latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo),
-                   name=d.get('name', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   run_as_user_name=d.get('run_as_user_name', None),
-                   spec=_from_dict(d, 'spec', PipelineSpec),
-                   state=_enum(d, 'state', PipelineState))
+        return cls(
+            cause=d.get("cause", None),
+            cluster_id=d.get("cluster_id", None),
+            creator_user_name=d.get("creator_user_name", None),
+            effective_budget_policy_id=d.get("effective_budget_policy_id", None),
+            health=_enum(d, "health", GetPipelineResponseHealth),
+            last_modified=d.get("last_modified", None),
+            latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
+            name=d.get("name", None),
+            pipeline_id=d.get("pipeline_id", None),
+            run_as_user_name=d.get("run_as_user_name", None),
+            spec=_from_dict(d, "spec", PipelineSpec),
+            state=_enum(d, "state", PipelineState),
+        )
 
 
 class GetPipelineResponseHealth(Enum):
     """The health of a pipeline."""
 
-    HEALTHY = 'HEALTHY'
-    UNHEALTHY = 'UNHEALTHY'
+    HEALTHY = "HEALTHY"
+    UNHEALTHY = "UNHEALTHY"
 
 
 @dataclass
@@ -758,19 +920,21 @@ class GetUpdateResponse:
     def as_dict(self) -> dict:
         """Serializes the GetUpdateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.update: body['update'] = self.update.as_dict()
+        if self.update:
+            body["update"] = self.update.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.update: body['update'] = self.update
+        if self.update:
+            body["update"] = self.update
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
         """Deserializes the GetUpdateResponse from a dictionary."""
-        return cls(update=_from_dict(d, 'update', UpdateInfo))
+        return cls(update=_from_dict(d, "update", UpdateInfo))
 
 
 @dataclass
@@ -787,25 +951,33 @@ class IngestionConfig:
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.report: body['report'] = self.report.as_dict()
-        if self.schema: body['schema'] = self.schema.as_dict()
-        if self.table: body['table'] = self.table.as_dict()
+        if self.report:
+            body["report"] = self.report.as_dict()
+        if self.schema:
+            body["schema"] = self.schema.as_dict()
+        if self.table:
+            body["table"] = self.table.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.report: body['report'] = self.report
-        if self.schema: body['schema'] = self.schema
-        if self.table: body['table'] = self.table
+        if self.report:
+            body["report"] = self.report
+        if self.schema:
+            body["schema"] = self.schema
+        if self.table:
+            body["table"] = self.table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(report=_from_dict(d, 'report', ReportSpec),
-                   schema=_from_dict(d, 'schema', SchemaSpec),
-                   table=_from_dict(d, 'table', TableSpec))
+        return cls(
+            report=_from_dict(d, "report", ReportSpec),
+            schema=_from_dict(d, "schema", SchemaSpec),
+            table=_from_dict(d, "table", TableSpec),
+        )
 
 
 @dataclass
@@ -832,35 +1004,43 @@ class IngestionGatewayPipelineDefinition:
     def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
         if self.gateway_storage_catalog is not None:
-            body['gateway_storage_catalog'] = self.gateway_storage_catalog
-        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+            body["gateway_storage_catalog"] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None:
+            body["gateway_storage_name"] = self.gateway_storage_name
         if self.gateway_storage_schema is not None:
-            body['gateway_storage_schema'] = self.gateway_storage_schema
+            body["gateway_storage_schema"] = self.gateway_storage_schema
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
         if self.gateway_storage_catalog is not None:
-            body['gateway_storage_catalog'] = self.gateway_storage_catalog
-        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+            body["gateway_storage_catalog"] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None:
+            body["gateway_storage_name"] = self.gateway_storage_name
         if self.gateway_storage_schema is not None:
-            body['gateway_storage_schema'] = self.gateway_storage_schema
+            body["gateway_storage_schema"] = self.gateway_storage_schema
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
         """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
-        return cls(connection_id=d.get('connection_id', None),
-                   connection_name=d.get('connection_name', None),
-                   gateway_storage_catalog=d.get('gateway_storage_catalog', None),
-                   gateway_storage_name=d.get('gateway_storage_name', None),
-                   gateway_storage_schema=d.get('gateway_storage_schema', None))
+        return cls(
+            connection_id=d.get("connection_id", None),
+            connection_name=d.get("connection_name", None),
+            gateway_storage_catalog=d.get("gateway_storage_catalog", None),
+            gateway_storage_name=d.get("gateway_storage_name", None),
+            gateway_storage_schema=d.get("gateway_storage_schema", None),
+        )
 
 
 @dataclass
@@ -883,28 +1063,38 @@ class IngestionPipelineDefinition:
     def as_dict(self) -> dict:
         """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
-        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.ingestion_gateway_id is not None:
+            body["ingestion_gateway_id"] = self.ingestion_gateway_id
+        if self.objects:
+            body["objects"] = [v.as_dict() for v in self.objects]
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
-        if self.objects: body['objects'] = self.objects
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.ingestion_gateway_id is not None:
+            body["ingestion_gateway_id"] = self.ingestion_gateway_id
+        if self.objects:
+            body["objects"] = self.objects
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition:
         """Deserializes the IngestionPipelineDefinition from a dictionary."""
-        return cls(connection_name=d.get('connection_name', None),
-                   ingestion_gateway_id=d.get('ingestion_gateway_id', None),
-                   objects=_repeated_dict(d, 'objects', IngestionConfig),
-                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+        return cls(
+            connection_name=d.get("connection_name", None),
+            ingestion_gateway_id=d.get("ingestion_gateway_id", None),
+            objects=_repeated_dict(d, "objects", IngestionConfig),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
 
 
 @dataclass
@@ -921,25 +1111,33 @@ class ListPipelineEventsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListPipelineEventsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.events: body['events'] = [v.as_dict() for v in self.events]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.events:
+            body["events"] = [v.as_dict() for v in self.events]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListPipelineEventsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.events: body['events'] = self.events
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.events:
+            body["events"] = self.events
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelineEventsResponse:
         """Deserializes the ListPipelineEventsResponse from a dictionary."""
-        return cls(events=_repeated_dict(d, 'events', PipelineEvent),
-                   next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None))
+        return cls(
+            events=_repeated_dict(d, "events", PipelineEvent),
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+        )
 
 
 @dataclass
@@ -953,22 +1151,28 @@ class ListPipelinesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListPipelinesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.statuses:
+            body["statuses"] = [v.as_dict() for v in self.statuses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.statuses: body['statuses'] = self.statuses
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.statuses:
+            body["statuses"] = self.statuses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelinesResponse:
         """Deserializes the ListPipelinesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   statuses=_repeated_dict(d, 'statuses', PipelineStateInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            statuses=_repeated_dict(d, "statuses", PipelineStateInfo),
+        )
 
 
 @dataclass
@@ -985,25 +1189,33 @@ class ListUpdatesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListUpdatesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
-        if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
+        if self.updates:
+            body["updates"] = [v.as_dict() for v in self.updates]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
-        if self.updates: body['updates'] = self.updates
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.prev_page_token is not None:
+            body["prev_page_token"] = self.prev_page_token
+        if self.updates:
+            body["updates"] = self.updates
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse:
         """Deserializes the ListUpdatesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   prev_page_token=d.get('prev_page_token', None),
-                   updates=_repeated_dict(d, 'updates', UpdateInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            prev_page_token=d.get("prev_page_token", None),
+            updates=_repeated_dict(d, "updates", UpdateInfo),
+        )
 
 
 @dataclass
@@ -1028,9 +1240,9 @@ def from_dict(cls, d: Dict[str, any]) -> ManualTrigger:
 class MaturityLevel(Enum):
     """Maturity level for EventDetails."""
 
-    DEPRECATED = 'DEPRECATED'
-    EVOLVING = 'EVOLVING'
-    STABLE = 'STABLE'
+    DEPRECATED = "DEPRECATED"
+    EVOLVING = "EVOLVING"
+    STABLE = "STABLE"
 
 
 @dataclass
@@ -1041,19 +1253,21 @@ class NotebookLibrary:
     def as_dict(self) -> dict:
         """Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookLibrary:
         """Deserializes the NotebookLibrary from a dictionary."""
-        return cls(path=d.get('path', None))
+        return cls(path=d.get("path", None))
 
 
 @dataclass
@@ -1072,21 +1286,28 @@ class Notifications:
     def as_dict(self) -> dict:
         """Serializes the Notifications into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alerts: body['alerts'] = [v for v in self.alerts]
-        if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients]
+        if self.alerts:
+            body["alerts"] = [v for v in self.alerts]
+        if self.email_recipients:
+            body["email_recipients"] = [v for v in self.email_recipients]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Notifications into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alerts: body['alerts'] = self.alerts
-        if self.email_recipients: body['email_recipients'] = self.email_recipients
+        if self.alerts:
+            body["alerts"] = self.alerts
+        if self.email_recipients:
+            body["email_recipients"] = self.email_recipients
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Notifications:
         """Deserializes the Notifications from a dictionary."""
-        return cls(alerts=d.get('alerts', None), email_recipients=d.get('email_recipients', None))
+        return cls(
+            alerts=d.get("alerts", None),
+            email_recipients=d.get("email_recipients", None),
+        )
 
 
 @dataclass
@@ -1146,67 +1367,103 @@ class Origin:
     def as_dict(self) -> dict:
         """Serializes the Origin into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.batch_id is not None: body['batch_id'] = self.batch_id
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
-        if self.flow_id is not None: body['flow_id'] = self.flow_id
-        if self.flow_name is not None: body['flow_name'] = self.flow_name
-        if self.host is not None: body['host'] = self.host
-        if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id
-        if self.materialization_name is not None: body['materialization_name'] = self.materialization_name
-        if self.org_id is not None: body['org_id'] = self.org_id
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name
-        if self.region is not None: body['region'] = self.region
-        if self.request_id is not None: body['request_id'] = self.request_id
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.batch_id is not None:
+            body["batch_id"] = self.batch_id
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.dataset_name is not None:
+            body["dataset_name"] = self.dataset_name
+        if self.flow_id is not None:
+            body["flow_id"] = self.flow_id
+        if self.flow_name is not None:
+            body["flow_name"] = self.flow_name
+        if self.host is not None:
+            body["host"] = self.host
+        if self.maintenance_id is not None:
+            body["maintenance_id"] = self.maintenance_id
+        if self.materialization_name is not None:
+            body["materialization_name"] = self.materialization_name
+        if self.org_id is not None:
+            body["org_id"] = self.org_id
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.pipeline_name is not None:
+            body["pipeline_name"] = self.pipeline_name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.uc_resource_id is not None:
+            body["uc_resource_id"] = self.uc_resource_id
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Origin into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.batch_id is not None: body['batch_id'] = self.batch_id
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
-        if self.flow_id is not None: body['flow_id'] = self.flow_id
-        if self.flow_name is not None: body['flow_name'] = self.flow_name
-        if self.host is not None: body['host'] = self.host
-        if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id
-        if self.materialization_name is not None: body['materialization_name'] = self.materialization_name
-        if self.org_id is not None: body['org_id'] = self.org_id
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name
-        if self.region is not None: body['region'] = self.region
-        if self.request_id is not None: body['request_id'] = self.request_id
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.batch_id is not None:
+            body["batch_id"] = self.batch_id
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.dataset_name is not None:
+            body["dataset_name"] = self.dataset_name
+        if self.flow_id is not None:
+            body["flow_id"] = self.flow_id
+        if self.flow_name is not None:
+            body["flow_name"] = self.flow_name
+        if self.host is not None:
+            body["host"] = self.host
+        if self.maintenance_id is not None:
+            body["maintenance_id"] = self.maintenance_id
+        if self.materialization_name is not None:
+            body["materialization_name"] = self.materialization_name
+        if self.org_id is not None:
+            body["org_id"] = self.org_id
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.pipeline_name is not None:
+            body["pipeline_name"] = self.pipeline_name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.request_id is not None:
+            body["request_id"] = self.request_id
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.uc_resource_id is not None:
+            body["uc_resource_id"] = self.uc_resource_id
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Origin:
         """Deserializes the Origin from a dictionary."""
-        return cls(batch_id=d.get('batch_id', None),
-                   cloud=d.get('cloud', None),
-                   cluster_id=d.get('cluster_id', None),
-                   dataset_name=d.get('dataset_name', None),
-                   flow_id=d.get('flow_id', None),
-                   flow_name=d.get('flow_name', None),
-                   host=d.get('host', None),
-                   maintenance_id=d.get('maintenance_id', None),
-                   materialization_name=d.get('materialization_name', None),
-                   org_id=d.get('org_id', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   pipeline_name=d.get('pipeline_name', None),
-                   region=d.get('region', None),
-                   request_id=d.get('request_id', None),
-                   table_id=d.get('table_id', None),
-                   uc_resource_id=d.get('uc_resource_id', None),
-                   update_id=d.get('update_id', None))
+        return cls(
+            batch_id=d.get("batch_id", None),
+            cloud=d.get("cloud", None),
+            cluster_id=d.get("cluster_id", None),
+            dataset_name=d.get("dataset_name", None),
+            flow_id=d.get("flow_id", None),
+            flow_name=d.get("flow_name", None),
+            host=d.get("host", None),
+            maintenance_id=d.get("maintenance_id", None),
+            materialization_name=d.get("materialization_name", None),
+            org_id=d.get("org_id", None),
+            pipeline_id=d.get("pipeline_id", None),
+            pipeline_name=d.get("pipeline_name", None),
+            region=d.get("region", None),
+            request_id=d.get("request_id", None),
+            table_id=d.get("table_id", None),
+            uc_resource_id=d.get("uc_resource_id", None),
+            update_id=d.get("update_id", None),
+        )
 
 
 @dataclass
@@ -1226,30 +1483,38 @@ class PipelineAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the PipelineAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlRequest:
         """Deserializes the PipelineAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', PipelinePermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", PipelinePermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1272,33 +1537,43 @@ class PipelineAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the PipelineAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlResponse:
         """Deserializes the PipelineAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', PipelinePermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", PipelinePermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1407,78 +1682,112 @@ def as_dict(self) -> dict:
         """Serializes the PipelineCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.label is not None: body['label'] = self.label
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.label is not None:
+            body["label"] = self.label
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.apply_policy_default_values is not None:
-            body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.driver_instance_pool_id is not None:
-            body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
         if self.enable_local_disk_encryption is not None:
-            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.label is not None: body['label'] = self.label
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.label is not None:
+            body["label"] = self.label
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineCluster:
         """Deserializes the PipelineCluster from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', PipelineClusterAutoscale),
-                   aws_attributes=_from_dict(d, 'aws_attributes', compute.AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', compute.AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', compute.ClusterLogConf),
-                   custom_tags=d.get('custom_tags', None),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
-                   gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes),
-                   init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo),
-                   instance_pool_id=d.get('instance_pool_id', None),
-                   label=d.get('label', None),
-                   node_type_id=d.get('node_type_id', None),
-                   num_workers=d.get('num_workers', None),
-                   policy_id=d.get('policy_id', None),
-                   spark_conf=d.get('spark_conf', None),
-                   spark_env_vars=d.get('spark_env_vars', None),
-                   ssh_public_keys=d.get('ssh_public_keys', None))
+        return cls(
+            apply_policy_default_values=d.get("apply_policy_default_values", None),
+            autoscale=_from_dict(d, "autoscale", PipelineClusterAutoscale),
+            aws_attributes=_from_dict(d, "aws_attributes", compute.AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", compute.AzureAttributes),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", compute.ClusterLogConf),
+            custom_tags=d.get("custom_tags", None),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            gcp_attributes=_from_dict(d, "gcp_attributes", compute.GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", compute.InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            label=d.get("label", None),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+        )
 
 
 @dataclass
@@ -1500,25 +1809,33 @@ class PipelineClusterAutoscale:
     def as_dict(self) -> dict:
         """Serializes the PipelineClusterAutoscale into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.max_workers is not None: body['max_workers'] = self.max_workers
-        if self.min_workers is not None: body['min_workers'] = self.min_workers
-        if self.mode is not None: body['mode'] = self.mode.value
+        if self.max_workers is not None:
+            body["max_workers"] = self.max_workers
+        if self.min_workers is not None:
+            body["min_workers"] = self.min_workers
+        if self.mode is not None:
+            body["mode"] = self.mode.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.max_workers is not None: body['max_workers'] = self.max_workers
-        if self.min_workers is not None: body['min_workers'] = self.min_workers
-        if self.mode is not None: body['mode'] = self.mode
+        if self.max_workers is not None:
+            body["max_workers"] = self.max_workers
+        if self.min_workers is not None:
+            body["min_workers"] = self.min_workers
+        if self.mode is not None:
+            body["mode"] = self.mode
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineClusterAutoscale:
         """Deserializes the PipelineClusterAutoscale from a dictionary."""
-        return cls(max_workers=d.get('max_workers', None),
-                   min_workers=d.get('min_workers', None),
-                   mode=_enum(d, 'mode', PipelineClusterAutoscaleMode))
+        return cls(
+            max_workers=d.get("max_workers", None),
+            min_workers=d.get("min_workers", None),
+            mode=_enum(d, "mode", PipelineClusterAutoscaleMode),
+        )
 
 
 class PipelineClusterAutoscaleMode(Enum):
@@ -1527,8 +1844,8 @@ class PipelineClusterAutoscaleMode(Enum):
     of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy
     autoscaling feature is used for `maintenance` clusters."""
 
-    ENHANCED = 'ENHANCED'
-    LEGACY = 'LEGACY'
+    ENHANCED = "ENHANCED"
+    LEGACY = "LEGACY"
 
 
 @dataclass
@@ -1542,22 +1859,28 @@ class PipelineDeployment:
     def as_dict(self) -> dict:
         """Serializes the PipelineDeployment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.metadata_file_path is not None:
+            body["metadata_file_path"] = self.metadata_file_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.kind is not None: body['kind'] = self.kind
-        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.metadata_file_path is not None:
+            body["metadata_file_path"] = self.metadata_file_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineDeployment:
         """Deserializes the PipelineDeployment from a dictionary."""
-        return cls(kind=_enum(d, 'kind', DeploymentKind),
-                   metadata_file_path=d.get('metadata_file_path', None))
+        return cls(
+            kind=_enum(d, "kind", DeploymentKind),
+            metadata_file_path=d.get("metadata_file_path", None),
+        )
 
 
 @dataclass
@@ -1592,43 +1915,63 @@ class PipelineEvent:
     def as_dict(self) -> dict:
         """Serializes the PipelineEvent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error: body['error'] = self.error.as_dict()
-        if self.event_type is not None: body['event_type'] = self.event_type
-        if self.id is not None: body['id'] = self.id
-        if self.level is not None: body['level'] = self.level.value
-        if self.maturity_level is not None: body['maturity_level'] = self.maturity_level.value
-        if self.message is not None: body['message'] = self.message
-        if self.origin: body['origin'] = self.origin.as_dict()
-        if self.sequence: body['sequence'] = self.sequence.as_dict()
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.error:
+            body["error"] = self.error.as_dict()
+        if self.event_type is not None:
+            body["event_type"] = self.event_type
+        if self.id is not None:
+            body["id"] = self.id
+        if self.level is not None:
+            body["level"] = self.level.value
+        if self.maturity_level is not None:
+            body["maturity_level"] = self.maturity_level.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.origin:
+            body["origin"] = self.origin.as_dict()
+        if self.sequence:
+            body["sequence"] = self.sequence.as_dict()
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error: body['error'] = self.error
-        if self.event_type is not None: body['event_type'] = self.event_type
-        if self.id is not None: body['id'] = self.id
-        if self.level is not None: body['level'] = self.level
-        if self.maturity_level is not None: body['maturity_level'] = self.maturity_level
-        if self.message is not None: body['message'] = self.message
-        if self.origin: body['origin'] = self.origin
-        if self.sequence: body['sequence'] = self.sequence
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.error:
+            body["error"] = self.error
+        if self.event_type is not None:
+            body["event_type"] = self.event_type
+        if self.id is not None:
+            body["id"] = self.id
+        if self.level is not None:
+            body["level"] = self.level
+        if self.maturity_level is not None:
+            body["maturity_level"] = self.maturity_level
+        if self.message is not None:
+            body["message"] = self.message
+        if self.origin:
+            body["origin"] = self.origin
+        if self.sequence:
+            body["sequence"] = self.sequence
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineEvent:
         """Deserializes the PipelineEvent from a dictionary."""
-        return cls(error=_from_dict(d, 'error', ErrorDetail),
-                   event_type=d.get('event_type', None),
-                   id=d.get('id', None),
-                   level=_enum(d, 'level', EventLevel),
-                   maturity_level=_enum(d, 'maturity_level', MaturityLevel),
-                   message=d.get('message', None),
-                   origin=_from_dict(d, 'origin', Origin),
-                   sequence=_from_dict(d, 'sequence', Sequencing),
-                   timestamp=d.get('timestamp', None))
+        return cls(
+            error=_from_dict(d, "error", ErrorDetail),
+            event_type=d.get("event_type", None),
+            id=d.get("id", None),
+            level=_enum(d, "level", EventLevel),
+            maturity_level=_enum(d, "maturity_level", MaturityLevel),
+            message=d.get("message", None),
+            origin=_from_dict(d, "origin", Origin),
+            sequence=_from_dict(d, "sequence", Sequencing),
+            timestamp=d.get("timestamp", None),
+        )
 
 
 @dataclass
@@ -1651,31 +1994,43 @@ class PipelineLibrary:
     def as_dict(self) -> dict:
         """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file: body['file'] = self.file.as_dict()
-        if self.jar is not None: body['jar'] = self.jar
-        if self.maven: body['maven'] = self.maven.as_dict()
-        if self.notebook: body['notebook'] = self.notebook.as_dict()
-        if self.whl is not None: body['whl'] = self.whl
+        if self.file:
+            body["file"] = self.file.as_dict()
+        if self.jar is not None:
+            body["jar"] = self.jar
+        if self.maven:
+            body["maven"] = self.maven.as_dict()
+        if self.notebook:
+            body["notebook"] = self.notebook.as_dict()
+        if self.whl is not None:
+            body["whl"] = self.whl
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file: body['file'] = self.file
-        if self.jar is not None: body['jar'] = self.jar
-        if self.maven: body['maven'] = self.maven
-        if self.notebook: body['notebook'] = self.notebook
-        if self.whl is not None: body['whl'] = self.whl
+        if self.file:
+            body["file"] = self.file
+        if self.jar is not None:
+            body["jar"] = self.jar
+        if self.maven:
+            body["maven"] = self.maven
+        if self.notebook:
+            body["notebook"] = self.notebook
+        if self.whl is not None:
+            body["whl"] = self.whl
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary:
         """Deserializes the PipelineLibrary from a dictionary."""
-        return cls(file=_from_dict(d, 'file', FileLibrary),
-                   jar=d.get('jar', None),
-                   maven=_from_dict(d, 'maven', compute.MavenLibrary),
-                   notebook=_from_dict(d, 'notebook', NotebookLibrary),
-                   whl=d.get('whl', None))
+        return cls(
+            file=_from_dict(d, "file", FileLibrary),
+            jar=d.get("jar", None),
+            maven=_from_dict(d, "maven", compute.MavenLibrary),
+            notebook=_from_dict(d, "notebook", NotebookLibrary),
+            whl=d.get("whl", None),
+        )
 
 
 @dataclass
@@ -1690,34 +2045,42 @@ class PipelinePermission:
     def as_dict(self) -> dict:
         """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermission:
         """Deserializes the PipelinePermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', PipelinePermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", PipelinePermissionLevel),
+        )
 
 
 class PipelinePermissionLevel(Enum):
     """Permission level"""
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_RUN = 'CAN_RUN'
-    CAN_VIEW = 'CAN_VIEW'
-    IS_OWNER = 'IS_OWNER'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_RUN = "CAN_RUN"
+    CAN_VIEW = "CAN_VIEW"
+    IS_OWNER = "IS_OWNER"
 
 
 @dataclass
@@ -1732,26 +2095,32 @@ def as_dict(self) -> dict:
         """Serializes the PipelinePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissions:
         """Deserializes the PipelinePermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      PipelineAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -1764,22 +2133,28 @@ class PipelinePermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsDescription:
         """Deserializes the PipelinePermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', PipelinePermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", PipelinePermissionLevel),
+        )
 
 
 @dataclass
@@ -1793,22 +2168,27 @@ def as_dict(self) -> dict:
         """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest:
         """Deserializes the PipelinePermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PipelineAccessControlRequest),
-                   pipeline_id=d.get('pipeline_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest),
+            pipeline_id=d.get("pipeline_id", None),
+        )
 
 
 @dataclass
@@ -1892,99 +2272,147 @@ class PipelineSpec:
     def as_dict(self) -> dict:
         """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
-        if self.filters: body['filters'] = self.filters.as_dict()
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.filters:
+            body["filters"] = self.filters.as_dict()
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition.as_dict()
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.restart_window:
+            body["restart_window"] = self.restart_window.as_dict()
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = self.clusters
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
-        if self.filters: body['filters'] = self.filters
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
-        if self.libraries: body['libraries'] = self.libraries
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = self.notifications
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.filters:
+            body["filters"] = self.filters
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.restart_window:
+            body["restart_window"] = self.restart_window
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
-        return cls(budget_policy_id=d.get('budget_policy_id', None),
-                   catalog=d.get('catalog', None),
-                   channel=d.get('channel', None),
-                   clusters=_repeated_dict(d, 'clusters', PipelineCluster),
-                   configuration=d.get('configuration', None),
-                   continuous=d.get('continuous', None),
-                   deployment=_from_dict(d, 'deployment', PipelineDeployment),
-                   development=d.get('development', None),
-                   edition=d.get('edition', None),
-                   filters=_from_dict(d, 'filters', Filters),
-                   gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition),
-                   id=d.get('id', None),
-                   ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition),
-                   libraries=_repeated_dict(d, 'libraries', PipelineLibrary),
-                   name=d.get('name', None),
-                   notifications=_repeated_dict(d, 'notifications', Notifications),
-                   photon=d.get('photon', None),
-                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
-                   schema=d.get('schema', None),
-                   serverless=d.get('serverless', None),
-                   storage=d.get('storage', None),
-                   target=d.get('target', None),
-                   trigger=_from_dict(d, 'trigger', PipelineTrigger))
+        return cls(
+            budget_policy_id=d.get("budget_policy_id", None),
+            catalog=d.get("catalog", None),
+            channel=d.get("channel", None),
+            clusters=_repeated_dict(d, "clusters", PipelineCluster),
+            configuration=d.get("configuration", None),
+            continuous=d.get("continuous", None),
+            deployment=_from_dict(d, "deployment", PipelineDeployment),
+            development=d.get("development", None),
+            edition=d.get("edition", None),
+            filters=_from_dict(d, "filters", Filters),
+            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
+            id=d.get("id", None),
+            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
+            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
+            name=d.get("name", None),
+            notifications=_repeated_dict(d, "notifications", Notifications),
+            photon=d.get("photon", None),
+            restart_window=_from_dict(d, "restart_window", RestartWindow),
+            schema=d.get("schema", None),
+            serverless=d.get("serverless", None),
+            storage=d.get("storage", None),
+            target=d.get("target", None),
+            trigger=_from_dict(d, "trigger", PipelineTrigger),
+        )
 
 
 class PipelineState(Enum):
     """The pipeline state."""
 
-    DELETED = 'DELETED'
-    DEPLOYING = 'DEPLOYING'
-    FAILED = 'FAILED'
-    IDLE = 'IDLE'
-    RECOVERING = 'RECOVERING'
-    RESETTING = 'RESETTING'
-    RUNNING = 'RUNNING'
-    STARTING = 'STARTING'
-    STOPPING = 'STOPPING'
+    DELETED = "DELETED"
+    DEPLOYING = "DEPLOYING"
+    FAILED = "FAILED"
+    IDLE = "IDLE"
+    RECOVERING = "RECOVERING"
+    RESETTING = "RESETTING"
+    RUNNING = "RUNNING"
+    STARTING = "STARTING"
+    STOPPING = "STOPPING"
 
 
 @dataclass
@@ -2017,47 +2445,65 @@ class PipelineStateInfo:
     def as_dict(self) -> dict:
         """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.health is not None: body['health'] = self.health.value
-        if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
-        if self.name is not None: body['name'] = self.name
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.state is not None: body['state'] = self.state.value
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.health is not None:
+            body["health"] = self.health.value
+        if self.latest_updates:
+            body["latest_updates"] = [v.as_dict() for v in self.latest_updates]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.health is not None: body['health'] = self.health
-        if self.latest_updates: body['latest_updates'] = self.latest_updates
-        if self.name is not None: body['name'] = self.name
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
-        if self.state is not None: body['state'] = self.state
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.health is not None:
+            body["health"] = self.health
+        if self.latest_updates:
+            body["latest_updates"] = self.latest_updates
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.run_as_user_name is not None:
+            body["run_as_user_name"] = self.run_as_user_name
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo:
         """Deserializes the PipelineStateInfo from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None),
-                   creator_user_name=d.get('creator_user_name', None),
-                   health=_enum(d, 'health', PipelineStateInfoHealth),
-                   latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo),
-                   name=d.get('name', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   run_as_user_name=d.get('run_as_user_name', None),
-                   state=_enum(d, 'state', PipelineState))
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            creator_user_name=d.get("creator_user_name", None),
+            health=_enum(d, "health", PipelineStateInfoHealth),
+            latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
+            name=d.get("name", None),
+            pipeline_id=d.get("pipeline_id", None),
+            run_as_user_name=d.get("run_as_user_name", None),
+            state=_enum(d, "state", PipelineState),
+        )
 
 
 class PipelineStateInfoHealth(Enum):
     """The health of a pipeline."""
 
-    HEALTHY = 'HEALTHY'
-    UNHEALTHY = 'UNHEALTHY'
+    HEALTHY = "HEALTHY"
+    UNHEALTHY = "UNHEALTHY"
 
 
 @dataclass
@@ -2069,21 +2515,28 @@ class PipelineTrigger:
     def as_dict(self) -> dict:
         """Serializes the PipelineTrigger into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cron: body['cron'] = self.cron.as_dict()
-        if self.manual: body['manual'] = self.manual.as_dict()
+        if self.cron:
+            body["cron"] = self.cron.as_dict()
+        if self.manual:
+            body["manual"] = self.manual.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cron: body['cron'] = self.cron
-        if self.manual: body['manual'] = self.manual
+        if self.cron:
+            body["cron"] = self.cron
+        if self.manual:
+            body["manual"] = self.manual
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger:
         """Deserializes the PipelineTrigger from a dictionary."""
-        return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))
+        return cls(
+            cron=_from_dict(d, "cron", CronTrigger),
+            manual=_from_dict(d, "manual", ManualTrigger),
+        )
 
 
 @dataclass
@@ -2107,31 +2560,43 @@ class ReportSpec:
     def as_dict(self) -> dict:
         """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_url is not None: body['source_url'] = self.source_url
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_url is not None:
+            body["source_url"] = self.source_url
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ReportSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_url is not None: body['source_url'] = self.source_url
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_url is not None:
+            body["source_url"] = self.source_url
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
         """Deserializes the ReportSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None),
-                   destination_schema=d.get('destination_schema', None),
-                   destination_table=d.get('destination_table', None),
-                   source_url=d.get('source_url', None),
-                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            destination_table=d.get("destination_table", None),
+            source_url=d.get("source_url", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
 
 
 @dataclass
@@ -2152,25 +2617,33 @@ class RestartWindow:
     def as_dict(self) -> dict:
         """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week]
-        if self.start_hour is not None: body['start_hour'] = self.start_hour
-        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        if self.days_of_week:
+            body["days_of_week"] = [v.value for v in self.days_of_week]
+        if self.start_hour is not None:
+            body["start_hour"] = self.start_hour
+        if self.time_zone_id is not None:
+            body["time_zone_id"] = self.time_zone_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.days_of_week: body['days_of_week'] = self.days_of_week
-        if self.start_hour is not None: body['start_hour'] = self.start_hour
-        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        if self.days_of_week:
+            body["days_of_week"] = self.days_of_week
+        if self.start_hour is not None:
+            body["start_hour"] = self.start_hour
+        if self.time_zone_id is not None:
+            body["time_zone_id"] = self.time_zone_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
         """Deserializes the RestartWindow from a dictionary."""
-        return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek),
-                   start_hour=d.get('start_hour', None),
-                   time_zone_id=d.get('time_zone_id', None))
+        return cls(
+            days_of_week=_repeated_enum(d, "days_of_week", DayOfWeek),
+            start_hour=d.get("start_hour", None),
+            time_zone_id=d.get("time_zone_id", None),
+        )
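
A small illustration of the distinction the two serializers above encode: as_dict() converts enum members to their string values for the JSON body, while as_shallow_dict() keeps the immediate attribute objects. A minimal sketch; the DayOfWeek member name and the time zone are assumptions for illustration only.

from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

# Restart serverless pipeline clusters on Mondays at 02:00 UTC.
window = RestartWindow(
    days_of_week=[DayOfWeek.MONDAY],  # assumed member name; any DayOfWeek value works
    start_hour=2,
    time_zone_id="UTC",
)

# as_dict() turns the enum members into their string values...
print(window.as_dict())          # {'days_of_week': ['MONDAY'], 'start_hour': 2, 'time_zone_id': 'UTC'}
# ...while as_shallow_dict() leaves the enum objects untouched.
print(window.as_shallow_dict())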
 
 
 @dataclass
@@ -2178,7 +2651,7 @@ class RunAs:
     """Write-only setting, available only in Create/Update calls. Specifies the user or service
     principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
     the pipeline.
-    
+
     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
     is thrown."""
 
@@ -2193,23 +2666,27 @@ def as_dict(self) -> dict:
         """Serializes the RunAs into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RunAs into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunAs:
         """Deserializes the RunAs from a dictionary."""
-        return cls(service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
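
The RunAs docstring above stresses that only one of user_name or service_principal_name may be set; supplying both is rejected by the service. A minimal sketch with a placeholder application ID.

from databricks.sdk.service.pipelines import RunAs

# Run the pipeline as a service principal (placeholder application ID).
run_as = RunAs(service_principal_name="00000000-0000-0000-0000-000000000000")

# Alternatively, run as a named user instead; never set both fields at once:
# run_as = RunAs(user_name="someone@example.com")

print(run_as.as_dict())  # {'service_principal_name': '00000000-0000-0000-0000-000000000000'}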
 
 
 @dataclass
@@ -2236,31 +2713,43 @@ class SchemaSpec:
     def as_dict(self) -> dict:
         """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaSpec:
         """Deserializes the SchemaSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None),
-                   destination_schema=d.get('destination_schema', None),
-                   source_catalog=d.get('source_catalog', None),
-                   source_schema=d.get('source_schema', None),
-                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            source_catalog=d.get("source_catalog", None),
+            source_schema=d.get("source_schema", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
 
 
 @dataclass
@@ -2274,22 +2763,28 @@ class Sequencing:
     def as_dict(self) -> dict:
         """Serializes the Sequencing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
-        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id.as_dict()
+        if self.control_plane_seq_no is not None:
+            body["control_plane_seq_no"] = self.control_plane_seq_no
+        if self.data_plane_id:
+            body["data_plane_id"] = self.data_plane_id.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Sequencing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
-        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id
+        if self.control_plane_seq_no is not None:
+            body["control_plane_seq_no"] = self.control_plane_seq_no
+        if self.data_plane_id:
+            body["data_plane_id"] = self.data_plane_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Sequencing:
         """Deserializes the Sequencing from a dictionary."""
-        return cls(control_plane_seq_no=d.get('control_plane_seq_no', None),
-                   data_plane_id=_from_dict(d, 'data_plane_id', DataPlaneId))
+        return cls(
+            control_plane_seq_no=d.get("control_plane_seq_no", None),
+            data_plane_id=_from_dict(d, "data_plane_id", DataPlaneId),
+        )
 
 
 @dataclass
@@ -2306,25 +2801,33 @@ class SerializedException:
     def as_dict(self) -> dict:
         """Serializes the SerializedException into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.class_name is not None: body['class_name'] = self.class_name
-        if self.message is not None: body['message'] = self.message
-        if self.stack: body['stack'] = [v.as_dict() for v in self.stack]
+        if self.class_name is not None:
+            body["class_name"] = self.class_name
+        if self.message is not None:
+            body["message"] = self.message
+        if self.stack:
+            body["stack"] = [v.as_dict() for v in self.stack]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SerializedException into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.class_name is not None: body['class_name'] = self.class_name
-        if self.message is not None: body['message'] = self.message
-        if self.stack: body['stack'] = self.stack
+        if self.class_name is not None:
+            body["class_name"] = self.class_name
+        if self.message is not None:
+            body["message"] = self.message
+        if self.stack:
+            body["stack"] = self.stack
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SerializedException:
         """Deserializes the SerializedException from a dictionary."""
-        return cls(class_name=d.get('class_name', None),
-                   message=d.get('message', None),
-                   stack=_repeated_dict(d, 'stack', StackFrame))
+        return cls(
+            class_name=d.get("class_name", None),
+            message=d.get("message", None),
+            stack=_repeated_dict(d, "stack", StackFrame),
+        )
 
 
 @dataclass
@@ -2344,28 +2847,38 @@ class StackFrame:
     def as_dict(self) -> dict:
         """Serializes the StackFrame into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
-        if self.file_name is not None: body['file_name'] = self.file_name
-        if self.line_number is not None: body['line_number'] = self.line_number
-        if self.method_name is not None: body['method_name'] = self.method_name
+        if self.declaring_class is not None:
+            body["declaring_class"] = self.declaring_class
+        if self.file_name is not None:
+            body["file_name"] = self.file_name
+        if self.line_number is not None:
+            body["line_number"] = self.line_number
+        if self.method_name is not None:
+            body["method_name"] = self.method_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StackFrame into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
-        if self.file_name is not None: body['file_name'] = self.file_name
-        if self.line_number is not None: body['line_number'] = self.line_number
-        if self.method_name is not None: body['method_name'] = self.method_name
+        if self.declaring_class is not None:
+            body["declaring_class"] = self.declaring_class
+        if self.file_name is not None:
+            body["file_name"] = self.file_name
+        if self.line_number is not None:
+            body["line_number"] = self.line_number
+        if self.method_name is not None:
+            body["method_name"] = self.method_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StackFrame:
         """Deserializes the StackFrame from a dictionary."""
-        return cls(declaring_class=d.get('declaring_class', None),
-                   file_name=d.get('file_name', None),
-                   line_number=d.get('line_number', None),
-                   method_name=d.get('method_name', None))
+        return cls(
+            declaring_class=d.get("declaring_class", None),
+            file_name=d.get("file_name", None),
+            line_number=d.get("line_number", None),
+            method_name=d.get("method_name", None),
+        )
 
 
 @dataclass
@@ -2394,45 +2907,58 @@ class StartUpdate:
     def as_dict(self) -> dict:
         """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause.value
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.cause is not None:
+            body["cause"] = self.cause.value
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
         if self.full_refresh_selection:
-            body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = [v for v in self.refresh_selection]
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
+            body["full_refresh_selection"] = self.full_refresh_selection
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = self.refresh_selection
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdate:
         """Deserializes the StartUpdate from a dictionary."""
-        return cls(cause=_enum(d, 'cause', StartUpdateCause),
-                   full_refresh=d.get('full_refresh', None),
-                   full_refresh_selection=d.get('full_refresh_selection', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   refresh_selection=d.get('refresh_selection', None),
-                   validate_only=d.get('validate_only', None))
+        return cls(
+            cause=_enum(d, "cause", StartUpdateCause),
+            full_refresh=d.get("full_refresh", None),
+            full_refresh_selection=d.get("full_refresh_selection", None),
+            pipeline_id=d.get("pipeline_id", None),
+            refresh_selection=d.get("refresh_selection", None),
+            validate_only=d.get("validate_only", None),
+        )
 
 
 class StartUpdateCause(Enum):
 
-    API_CALL = 'API_CALL'
-    JOB_TASK = 'JOB_TASK'
-    RETRY_ON_FAILURE = 'RETRY_ON_FAILURE'
-    SCHEMA_CHANGE = 'SCHEMA_CHANGE'
-    SERVICE_UPGRADE = 'SERVICE_UPGRADE'
-    USER_ACTION = 'USER_ACTION'
+    API_CALL = "API_CALL"
+    JOB_TASK = "JOB_TASK"
+    RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
+    SCHEMA_CHANGE = "SCHEMA_CHANGE"
+    SERVICE_UPGRADE = "SERVICE_UPGRADE"
+    USER_ACTION = "USER_ACTION"
 
 
 @dataclass
@@ -2442,19 +2968,21 @@ class StartUpdateResponse:
     def as_dict(self) -> dict:
         """Serializes the StartUpdateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdateResponse:
         """Deserializes the StartUpdateResponse from a dictionary."""
-        return cls(update_id=d.get('update_id', None))
+        return cls(update_id=d.get("update_id", None))
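
For context on how StartUpdate is used: PipelinesAPI exposes a start_update wrapper (defined further down this module, outside this hunk) whose keyword arguments mirror the fields above. A hedged sketch, assuming a configured workspace client; the pipeline ID and table names are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Trigger an update that fully refreshes only the selected tables.
resp = w.pipelines.start_update(
    pipeline_id="1234-567890-abcdef",  # placeholder
    full_refresh_selection=["sales_orders_raw", "sales_orders_cleaned"],  # placeholders
)
print(resp.update_id)  # StartUpdateResponse carries the ID of the newly created update.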
 
 
 @dataclass
@@ -2504,37 +3032,53 @@ class TableSpec:
     def as_dict(self) -> dict:
         """Serializes the TableSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.source_table is not None: body['source_table'] = self.source_table
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.source_table is not None: body['source_table'] = self.source_table
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpec:
         """Deserializes the TableSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None),
-                   destination_schema=d.get('destination_schema', None),
-                   destination_table=d.get('destination_table', None),
-                   source_catalog=d.get('source_catalog', None),
-                   source_schema=d.get('source_schema', None),
-                   source_table=d.get('source_table', None),
-                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            destination_table=d.get("destination_table", None),
+            source_catalog=d.get("source_catalog", None),
+            source_schema=d.get("source_schema", None),
+            source_table=d.get("source_table", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
 
 
 @dataclass
@@ -2556,37 +3100,45 @@ class TableSpecificConfig:
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
+        if self.primary_keys:
+            body["primary_keys"] = [v for v in self.primary_keys]
         if self.salesforce_include_formula_fields is not None:
-            body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
-        if self.scd_type is not None: body['scd_type'] = self.scd_type.value
-        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
+            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
+        if self.scd_type is not None:
+            body["scd_type"] = self.scd_type.value
+        if self.sequence_by:
+            body["sequence_by"] = [v for v in self.sequence_by]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        if self.primary_keys:
+            body["primary_keys"] = self.primary_keys
         if self.salesforce_include_formula_fields is not None:
-            body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
-        if self.scd_type is not None: body['scd_type'] = self.scd_type
-        if self.sequence_by: body['sequence_by'] = self.sequence_by
+            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
+        if self.scd_type is not None:
+            body["scd_type"] = self.scd_type
+        if self.sequence_by:
+            body["sequence_by"] = self.sequence_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
-        return cls(primary_keys=d.get('primary_keys', None),
-                   salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
-                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType),
-                   sequence_by=d.get('sequence_by', None))
+        return cls(
+            primary_keys=d.get("primary_keys", None),
+            salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
+            scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
+            sequence_by=d.get("sequence_by", None),
+        )
 
 
 class TableSpecificConfigScdType(Enum):
     """The SCD type to use to ingest the table."""
 
-    SCD_TYPE_1 = 'SCD_TYPE_1'
-    SCD_TYPE_2 = 'SCD_TYPE_2'
+    SCD_TYPE_1 = "SCD_TYPE_1"
+    SCD_TYPE_2 = "SCD_TYPE_2"
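
To make the ingestion spec types above concrete, here is a hedged sketch of a SchemaSpec that ingests a whole source schema with SCD type 2 history tracking applied to each table. Catalog, schema, and key names are placeholders; in practice the spec would be attached to an ingestion pipeline definition when the pipeline is created.

from databricks.sdk.service.pipelines import (
    SchemaSpec,
    TableSpecificConfig,
    TableSpecificConfigScdType,
)

# Ingest every table of a source schema and keep full change history (SCD type 2).
spec = SchemaSpec(
    source_catalog="mysql_catalog",   # placeholder source names
    source_schema="sales",
    destination_catalog="main",       # placeholder destination names
    destination_schema="sales_bronze",
    table_configuration=TableSpecificConfig(
        primary_keys=["order_id"],    # placeholder key
        scd_type=TableSpecificConfigScdType.SCD_TYPE_2,
    ),
)

# as_dict() nests table_configuration via its own as_dict(), ready for the JSON body.
print(spec.as_dict())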
 
 
 @dataclass
@@ -2633,77 +3185,100 @@ class UpdateInfo:
     def as_dict(self) -> dict:
         """Serializes the UpdateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause.value
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.config: body['config'] = self.config.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.cause is not None:
+            body["cause"] = self.cause.value
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
         if self.full_refresh_selection:
-            body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
-        if self.state is not None: body['state'] = self.state.value
-        if self.update_id is not None: body['update_id'] = self.update_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = [v for v in self.refresh_selection]
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.config: body['config'] = self.config
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
-        if self.state is not None: body['state'] = self.state
-        if self.update_id is not None: body['update_id'] = self.update_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.config:
+            body["config"] = self.config
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
+            body["full_refresh_selection"] = self.full_refresh_selection
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = self.refresh_selection
+        if self.state is not None:
+            body["state"] = self.state
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInfo:
         """Deserializes the UpdateInfo from a dictionary."""
-        return cls(cause=_enum(d, 'cause', UpdateInfoCause),
-                   cluster_id=d.get('cluster_id', None),
-                   config=_from_dict(d, 'config', PipelineSpec),
-                   creation_time=d.get('creation_time', None),
-                   full_refresh=d.get('full_refresh', None),
-                   full_refresh_selection=d.get('full_refresh_selection', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   refresh_selection=d.get('refresh_selection', None),
-                   state=_enum(d, 'state', UpdateInfoState),
-                   update_id=d.get('update_id', None),
-                   validate_only=d.get('validate_only', None))
+        return cls(
+            cause=_enum(d, "cause", UpdateInfoCause),
+            cluster_id=d.get("cluster_id", None),
+            config=_from_dict(d, "config", PipelineSpec),
+            creation_time=d.get("creation_time", None),
+            full_refresh=d.get("full_refresh", None),
+            full_refresh_selection=d.get("full_refresh_selection", None),
+            pipeline_id=d.get("pipeline_id", None),
+            refresh_selection=d.get("refresh_selection", None),
+            state=_enum(d, "state", UpdateInfoState),
+            update_id=d.get("update_id", None),
+            validate_only=d.get("validate_only", None),
+        )
 
 
 class UpdateInfoCause(Enum):
     """What triggered this update."""
 
-    API_CALL = 'API_CALL'
-    JOB_TASK = 'JOB_TASK'
-    RETRY_ON_FAILURE = 'RETRY_ON_FAILURE'
-    SCHEMA_CHANGE = 'SCHEMA_CHANGE'
-    SERVICE_UPGRADE = 'SERVICE_UPGRADE'
-    USER_ACTION = 'USER_ACTION'
+    API_CALL = "API_CALL"
+    JOB_TASK = "JOB_TASK"
+    RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
+    SCHEMA_CHANGE = "SCHEMA_CHANGE"
+    SERVICE_UPGRADE = "SERVICE_UPGRADE"
+    USER_ACTION = "USER_ACTION"
 
 
 class UpdateInfoState(Enum):
     """The update state."""
 
-    CANCELED = 'CANCELED'
-    COMPLETED = 'COMPLETED'
-    CREATED = 'CREATED'
-    FAILED = 'FAILED'
-    INITIALIZING = 'INITIALIZING'
-    QUEUED = 'QUEUED'
-    RESETTING = 'RESETTING'
-    RUNNING = 'RUNNING'
-    SETTING_UP_TABLES = 'SETTING_UP_TABLES'
-    STOPPING = 'STOPPING'
-    WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES'
+    CANCELED = "CANCELED"
+    COMPLETED = "COMPLETED"
+    CREATED = "CREATED"
+    FAILED = "FAILED"
+    INITIALIZING = "INITIALIZING"
+    QUEUED = "QUEUED"
+    RESETTING = "RESETTING"
+    RUNNING = "RUNNING"
+    SETTING_UP_TABLES = "SETTING_UP_TABLES"
+    STOPPING = "STOPPING"
+    WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES"
 
 
 @dataclass
@@ -2717,66 +3292,76 @@ class UpdateStateInfo:
     def as_dict(self) -> dict:
         """Serializes the UpdateStateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.state is not None: body['state'] = self.state.value
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.state is not None: body['state'] = self.state
-        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.state is not None:
+            body["state"] = self.state
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStateInfo:
         """Deserializes the UpdateStateInfo from a dictionary."""
-        return cls(creation_time=d.get('creation_time', None),
-                   state=_enum(d, 'state', UpdateStateInfoState),
-                   update_id=d.get('update_id', None))
+        return cls(
+            creation_time=d.get("creation_time", None),
+            state=_enum(d, "state", UpdateStateInfoState),
+            update_id=d.get("update_id", None),
+        )
 
 
 class UpdateStateInfoState(Enum):
 
-    CANCELED = 'CANCELED'
-    COMPLETED = 'COMPLETED'
-    CREATED = 'CREATED'
-    FAILED = 'FAILED'
-    INITIALIZING = 'INITIALIZING'
-    QUEUED = 'QUEUED'
-    RESETTING = 'RESETTING'
-    RUNNING = 'RUNNING'
-    SETTING_UP_TABLES = 'SETTING_UP_TABLES'
-    STOPPING = 'STOPPING'
-    WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES'
+    CANCELED = "CANCELED"
+    COMPLETED = "COMPLETED"
+    CREATED = "CREATED"
+    FAILED = "FAILED"
+    INITIALIZING = "INITIALIZING"
+    QUEUED = "QUEUED"
+    RESETTING = "RESETTING"
+    RUNNING = "RUNNING"
+    SETTING_UP_TABLES = "SETTING_UP_TABLES"
+    STOPPING = "STOPPING"
+    WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES"
 
 
 class PipelinesAPI:
     """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
-    
+
     Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
     pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
     orchestration, cluster management, monitoring, data quality, and error handling.
-    
+
     Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
     manages how your data is transformed based on a target schema you define for each processing step. You can
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
-    data quality and specify how to handle records that fail those expectations."""
+    data quality and specify how to handle records that fail those expectations.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_pipeline_running(
-            self,
-            pipeline_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
+        self,
+        pipeline_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[GetPipelineResponse], None]] = None,
+    ) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.RUNNING, )
-        failure_states = (PipelineState.FAILED, )
-        status_message = 'polling...'
+        target_states = (PipelineState.RUNNING,)
+        failure_states = (PipelineState.FAILED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(pipeline_id=pipeline_id)
@@ -2787,27 +3372,28 @@ def wait_get_pipeline_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f"failed to reach RUNNING, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_get_pipeline_idle(
-            self,
-            pipeline_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
+        self,
+        pipeline_id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[GetPipelineResponse], None]] = None,
+    ) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.IDLE, )
-        failure_states = (PipelineState.FAILED, )
-        status_message = 'polling...'
+        target_states = (PipelineState.IDLE,)
+        failure_states = (PipelineState.FAILED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(pipeline_id=pipeline_id)
@@ -2818,51 +3404,53 @@ def wait_get_pipeline_idle(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                msg = f"failed to reach IDLE, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def create(self,
-               *,
-               allow_duplicate_names: Optional[bool] = None,
-               budget_policy_id: Optional[str] = None,
-               catalog: Optional[str] = None,
-               channel: Optional[str] = None,
-               clusters: Optional[List[PipelineCluster]] = None,
-               configuration: Optional[Dict[str, str]] = None,
-               continuous: Optional[bool] = None,
-               deployment: Optional[PipelineDeployment] = None,
-               development: Optional[bool] = None,
-               dry_run: Optional[bool] = None,
-               edition: Optional[str] = None,
-               filters: Optional[Filters] = None,
-               gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
-               id: Optional[str] = None,
-               ingestion_definition: Optional[IngestionPipelineDefinition] = None,
-               libraries: Optional[List[PipelineLibrary]] = None,
-               name: Optional[str] = None,
-               notifications: Optional[List[Notifications]] = None,
-               photon: Optional[bool] = None,
-               restart_window: Optional[RestartWindow] = None,
-               run_as: Optional[RunAs] = None,
-               schema: Optional[str] = None,
-               serverless: Optional[bool] = None,
-               storage: Optional[str] = None,
-               target: Optional[str] = None,
-               trigger: Optional[PipelineTrigger] = None) -> CreatePipelineResponse:
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
+
+    def create(
+        self,
+        *,
+        allow_duplicate_names: Optional[bool] = None,
+        budget_policy_id: Optional[str] = None,
+        catalog: Optional[str] = None,
+        channel: Optional[str] = None,
+        clusters: Optional[List[PipelineCluster]] = None,
+        configuration: Optional[Dict[str, str]] = None,
+        continuous: Optional[bool] = None,
+        deployment: Optional[PipelineDeployment] = None,
+        development: Optional[bool] = None,
+        dry_run: Optional[bool] = None,
+        edition: Optional[str] = None,
+        filters: Optional[Filters] = None,
+        gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
+        id: Optional[str] = None,
+        ingestion_definition: Optional[IngestionPipelineDefinition] = None,
+        libraries: Optional[List[PipelineLibrary]] = None,
+        name: Optional[str] = None,
+        notifications: Optional[List[Notifications]] = None,
+        photon: Optional[bool] = None,
+        restart_window: Optional[RestartWindow] = None,
+        run_as: Optional[RunAs] = None,
+        schema: Optional[str] = None,
+        serverless: Optional[bool] = None,
+        storage: Optional[str] = None,
+        target: Optional[str] = None,
+        trigger: Optional[PipelineTrigger] = None,
+    ) -> CreatePipelineResponse:
         """Create a pipeline.
-        
+
         Creates a new data processing pipeline based on the requested configuration. If successful, this
         method returns the ID of the new pipeline.
-        
+
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
         :param budget_policy_id: str (optional)
@@ -2908,7 +3496,7 @@ def create(self,
         :param run_as: :class:`RunAs` (optional)
           Write-only setting, available only in Create/Update calls. Specifies the user or service principal
           that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-          
+
           Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
           thrown.
         :param schema: str (optional)
@@ -2923,137 +3511,188 @@ def create(self,
           to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
         :param trigger: :class:`PipelineTrigger` (optional)
           Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-        
+
         :returns: :class:`CreatePipelineResponse`
         """
         body = {}
-        if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
-        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
-        if catalog is not None: body['catalog'] = catalog
-        if channel is not None: body['channel'] = channel
-        if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
-        if configuration is not None: body['configuration'] = configuration
-        if continuous is not None: body['continuous'] = continuous
-        if deployment is not None: body['deployment'] = deployment.as_dict()
-        if development is not None: body['development'] = development
-        if dry_run is not None: body['dry_run'] = dry_run
-        if edition is not None: body['edition'] = edition
-        if filters is not None: body['filters'] = filters.as_dict()
-        if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict()
-        if id is not None: body['id'] = id
-        if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict()
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        if name is not None: body['name'] = name
-        if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
-        if photon is not None: body['photon'] = photon
-        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
-        if run_as is not None: body['run_as'] = run_as.as_dict()
-        if schema is not None: body['schema'] = schema
-        if serverless is not None: body['serverless'] = serverless
-        if storage is not None: body['storage'] = storage
-        if target is not None: body['target'] = target
-        if trigger is not None: body['trigger'] = trigger.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/pipelines', body=body, headers=headers)
+        if allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = allow_duplicate_names
+        if budget_policy_id is not None:
+            body["budget_policy_id"] = budget_policy_id
+        if catalog is not None:
+            body["catalog"] = catalog
+        if channel is not None:
+            body["channel"] = channel
+        if clusters is not None:
+            body["clusters"] = [v.as_dict() for v in clusters]
+        if configuration is not None:
+            body["configuration"] = configuration
+        if continuous is not None:
+            body["continuous"] = continuous
+        if deployment is not None:
+            body["deployment"] = deployment.as_dict()
+        if development is not None:
+            body["development"] = development
+        if dry_run is not None:
+            body["dry_run"] = dry_run
+        if edition is not None:
+            body["edition"] = edition
+        if filters is not None:
+            body["filters"] = filters.as_dict()
+        if gateway_definition is not None:
+            body["gateway_definition"] = gateway_definition.as_dict()
+        if id is not None:
+            body["id"] = id
+        if ingestion_definition is not None:
+            body["ingestion_definition"] = ingestion_definition.as_dict()
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        if name is not None:
+            body["name"] = name
+        if notifications is not None:
+            body["notifications"] = [v.as_dict() for v in notifications]
+        if photon is not None:
+            body["photon"] = photon
+        if restart_window is not None:
+            body["restart_window"] = restart_window.as_dict()
+        if run_as is not None:
+            body["run_as"] = run_as.as_dict()
+        if schema is not None:
+            body["schema"] = schema
+        if serverless is not None:
+            body["serverless"] = serverless
+        if storage is not None:
+            body["storage"] = storage
+        if target is not None:
+            body["target"] = target
+        if trigger is not None:
+            body["trigger"] = trigger.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/pipelines", body=body, headers=headers)
         return CreatePipelineResponse.from_dict(res)
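
A hedged end-to-end sketch of the create call above, similar in spirit to the examples/ directory in this repo. The notebook path, names, and catalog/schema values are placeholders, and PipelineLibrary/NotebookLibrary are the library types defined elsewhere in this module.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import pipelines

w = WorkspaceClient()

# Create a development-mode, triggered pipeline driven by a single notebook.
created = w.pipelines.create(
    name="sdk-demo-pipeline",
    development=True,
    continuous=False,
    libraries=[
        pipelines.PipelineLibrary(
            notebook=pipelines.NotebookLibrary(path="/Users/someone@example.com/dlt-demo"),
        )
    ],
    catalog="main",      # publish tables to Unity Catalog (placeholder names)
    schema="dlt_demo",
)
print(created.pipeline_id)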
 
     def delete(self, pipeline_id: str):
         """Delete a pipeline.
-        
+
         Deletes a pipeline.
-        
+
         :param pipeline_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/pipelines/{pipeline_id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/pipelines/{pipeline_id}", headers=headers)
 
     def get(self, pipeline_id: str) -> GetPipelineResponse:
         """Get a pipeline.
-        
+
         :param pipeline_id: str
-        
+
         :returns: :class:`GetPipelineResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/pipelines/{pipeline_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}", headers=headers)
         return GetPipelineResponse.from_dict(res)
 
     def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse:
         """Get pipeline permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
-        
+
         :returns: :class:`GetPipelinePermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels",
+            headers=headers,
+        )
         return GetPipelinePermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, pipeline_id: str) -> PipelinePermissions:
         """Get pipeline permissions.
-        
+
         Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
-        
+
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
-        
+
         :returns: :class:`PipelinePermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/pipelines/{pipeline_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/pipelines/{pipeline_id}",
+            headers=headers,
+        )
         return PipelinePermissions.from_dict(res)
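
Reading permissions with the two endpoints above is a one-liner each. A hedged sketch with a placeholder pipeline ID; the response field names follow the shared permissions shape used across SDK services and are an assumption here.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pipeline_id = "1234-567890-abcdef"  # placeholder

# Which permission levels can be granted on this pipeline?
levels = w.pipelines.get_permission_levels(pipeline_id=pipeline_id)
print([pl.permission_level for pl in levels.permission_levels])

# Who currently holds permissions, including inherited ones?
perms = w.pipelines.get_permissions(pipeline_id=pipeline_id)
for acl in perms.access_control_list:
    principal = acl.user_name or acl.group_name or acl.service_principal_name
    print(principal, acl.all_permissions)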
 
     def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse:
         """Get a pipeline update.
-        
+
         Gets an update from an active pipeline.
-        
+
         :param pipeline_id: str
           The ID of the pipeline.
         :param update_id: str
           The ID of the update.
-        
+
         :returns: :class:`GetUpdateResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/pipelines/{pipeline_id}/updates/{update_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/pipelines/{pipeline_id}/updates/{update_id}",
+            headers=headers,
+        )
         return GetUpdateResponse.from_dict(res)
 
-    def list_pipeline_events(self,
-                             pipeline_id: str,
-                             *,
-                             filter: Optional[str] = None,
-                             max_results: Optional[int] = None,
-                             order_by: Optional[List[str]] = None,
-                             page_token: Optional[str] = None) -> Iterator[PipelineEvent]:
+    def list_pipeline_events(
+        self,
+        pipeline_id: str,
+        *,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PipelineEvent]:
         """List pipeline events.
-        
+
         Retrieves events for a pipeline.
-        
+
         :param pipeline_id: str
         :param filter: str (optional)
           Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
           are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
           > 'TIMESTAMP' (or >=,<,<=,=)
-          
+
           Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp>
           '2021-07-22T06:37:33.083Z'
         :param max_results: int (optional)
@@ -3067,46 +3706,56 @@ def list_pipeline_events(self,
           Page token returned by previous call. This field is mutually exclusive with all fields in this
           request except max_results. An error is returned if any fields other than max_results are set when
           this field is set.
-        
+
         :returns: Iterator over :class:`PipelineEvent`
         """
 
         query = {}
-        if filter is not None: query['filter'] = filter
-        if max_results is not None: query['max_results'] = max_results
-        if order_by is not None: query['order_by'] = [v for v in order_by]
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if filter is not None:
+            query["filter"] = filter
+        if max_results is not None:
+            query["max_results"] = max_results
+        if order_by is not None:
+            query["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/pipelines/{pipeline_id}/events',
-                                query=query,
-                                headers=headers)
-            if 'events' in json:
-                for v in json['events']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/pipelines/{pipeline_id}/events",
+                query=query,
+                headers=headers,
+            )
+            if "events" in json:
+                for v in json["events"]:
                     yield PipelineEvent.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_pipelines(self,
-                       *,
-                       filter: Optional[str] = None,
-                       max_results: Optional[int] = None,
-                       order_by: Optional[List[str]] = None,
-                       page_token: Optional[str] = None) -> Iterator[PipelineStateInfo]:
+            query["page_token"] = json["next_page_token"]
+
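
The filter grammar documented above composes with AND. A hedged sketch that streams only recent warnings and errors for one pipeline; the ID and timestamp are placeholders.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The iterator follows next_page_token transparently, so this walks every matching event.
for event in w.pipelines.list_pipeline_events(
    pipeline_id="1234-567890-abcdef",  # placeholder
    filter="level in ('ERROR', 'WARN') AND timestamp > '2021-07-22T06:37:33.083Z'",
    max_results=100,
):
    print(event.timestamp, event.level, event.message)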
+    def list_pipelines(
+        self,
+        *,
+        filter: Optional[str] = None,
+        max_results: Optional[int] = None,
+        order_by: Optional[List[str]] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[PipelineStateInfo]:
         """List pipelines.
-        
+
         Lists pipelines defined in the Delta Live Tables system.
-        
+
         :param filter: str (optional)
           Select a subset of results based on the specified criteria. The supported filters are:
-          
+
           * `notebook='<path>'` to select pipelines that reference the provided notebook path. * `name LIKE
           '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for
           example: `name LIKE '%shopping%'`
-          
+
           Composite filters are not supported. This field is optional.
         :param max_results: int (optional)
           The maximum number of entries to return in a single page. The system may return fewer than
@@ -3118,36 +3767,44 @@ def list_pipelines(self,
           default is id asc. This field is optional.
         :param page_token: str (optional)
           Page token returned by previous call
-        
+
         :returns: Iterator over :class:`PipelineStateInfo`
         """
 
         query = {}
-        if filter is not None: query['filter'] = filter
-        if max_results is not None: query['max_results'] = max_results
-        if order_by is not None: query['order_by'] = [v for v in order_by]
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if filter is not None:
+            query["filter"] = filter
+        if max_results is not None:
+            query["max_results"] = max_results
+        if order_by is not None:
+            query["order_by"] = [v for v in order_by]
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/pipelines', query=query, headers=headers)
-            if 'statuses' in json:
-                for v in json['statuses']:
+            json = self._api.do("GET", "/api/2.0/pipelines", query=query, headers=headers)
+            if "statuses" in json:
+                for v in json["statuses"]:
                     yield PipelineStateInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_updates(self,
-                     pipeline_id: str,
-                     *,
-                     max_results: Optional[int] = None,
-                     page_token: Optional[str] = None,
-                     until_update_id: Optional[str] = None) -> ListUpdatesResponse:
+            query["page_token"] = json["next_page_token"]
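+
+    # Illustrative usage (not part of the generated client): iterate over pipelines whose
+    # name matches a pattern. Assumes an already-configured `WorkspaceClient` named `w`,
+    # which is not defined in this file; the iterator follows `next_page_token` exactly as
+    # the loop above does.
+    #
+    #   for p in w.pipelines.list_pipelines(filter="name LIKE '%shopping%'"):
+    #       print(p.pipeline_id, p.name)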
+
+    def list_updates(
+        self,
+        pipeline_id: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+        until_update_id: Optional[str] = None,
+    ) -> ListUpdatesResponse:
         """List pipeline updates.
-        
+
         List updates for an active pipeline.
-        
+
         :param pipeline_id: str
           The pipeline to return updates for.
         :param max_results: int (optional)
@@ -3156,56 +3813,77 @@ def list_updates(self,
           Page token returned by previous call
         :param until_update_id: str (optional)
           If present, returns updates until and including this update_id.
-        
+
         :returns: :class:`ListUpdatesResponse`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        if until_update_id is not None: query['until_update_id'] = until_update_id
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.0/pipelines/{pipeline_id}/updates', query=query, headers=headers)
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        if until_update_id is not None:
+            query["until_update_id"] = until_update_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/pipelines/{pipeline_id}/updates",
+            query=query,
+            headers=headers,
+        )
         return ListUpdatesResponse.from_dict(res)
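+
+    # Illustrative usage (not part of the generated client): `list_updates` returns a single
+    # page, so callers page manually with `page_token`. Assumes a configured client `w` and
+    # an existing `pipeline_id`.
+    #
+    #   token = None
+    #   while True:
+    #       resp = w.pipelines.list_updates(pipeline_id, max_results=25, page_token=token)
+    #       for u in resp.updates or []:
+    #           print(u.update_id, u.state)
+    #       token = resp.next_page_token
+    #       if not token:
+    #           break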
 
     def set_permissions(
-            self,
-            pipeline_id: str,
-            *,
-            access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
+        self,
+        pipeline_id: str,
+        *,
+        access_control_list: Optional[List[PipelineAccessControlRequest]] = None,
+    ) -> PipelinePermissions:
         """Set pipeline permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
         :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-        
+
         :returns: :class:`PipelinePermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT', f'/api/2.0/permissions/pipelines/{pipeline_id}', body=body, headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/pipelines/{pipeline_id}",
+            body=body,
+            headers=headers,
+        )
         return PipelinePermissions.from_dict(res)
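+
+    # Illustrative usage (not part of the generated client): replace the pipeline's direct
+    # permissions with a single grant. Assumes a configured client `w`; the user name is a
+    # placeholder and the permission level is taken from this module's
+    # `PipelinePermissionLevel` enum.
+    #
+    #   acl = [
+    #       PipelineAccessControlRequest(
+    #           user_name="someone@example.com",
+    #           permission_level=PipelinePermissionLevel.CAN_MANAGE,
+    #       )
+    #   ]
+    #   w.pipelines.set_permissions(pipeline_id, access_control_list=acl)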
 
-    def start_update(self,
-                     pipeline_id: str,
-                     *,
-                     cause: Optional[StartUpdateCause] = None,
-                     full_refresh: Optional[bool] = None,
-                     full_refresh_selection: Optional[List[str]] = None,
-                     refresh_selection: Optional[List[str]] = None,
-                     validate_only: Optional[bool] = None) -> StartUpdateResponse:
+    def start_update(
+        self,
+        pipeline_id: str,
+        *,
+        cause: Optional[StartUpdateCause] = None,
+        full_refresh: Optional[bool] = None,
+        full_refresh_selection: Optional[List[str]] = None,
+        refresh_selection: Optional[List[str]] = None,
+        validate_only: Optional[bool] = None,
+    ) -> StartUpdateResponse:
         """Start a pipeline.
-        
+
         Starts a new update for the pipeline. If there is already an active update for the pipeline, the
         request will fail and the active update will remain running.
-        
+
         :param pipeline_id: str
         :param cause: :class:`StartUpdateCause` (optional)
         :param full_refresh: bool (optional)
@@ -3221,77 +3899,95 @@ def start_update(self,
         :param validate_only: bool (optional)
           If true, this update only validates the correctness of pipeline source code but does not materialize
           or publish any datasets.
-        
+
         :returns: :class:`StartUpdateResponse`
         """
         body = {}
-        if cause is not None: body['cause'] = cause.value
-        if full_refresh is not None: body['full_refresh'] = full_refresh
+        if cause is not None:
+            body["cause"] = cause.value
+        if full_refresh is not None:
+            body["full_refresh"] = full_refresh
         if full_refresh_selection is not None:
-            body['full_refresh_selection'] = [v for v in full_refresh_selection]
-        if refresh_selection is not None: body['refresh_selection'] = [v for v in refresh_selection]
-        if validate_only is not None: body['validate_only'] = validate_only
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', f'/api/2.0/pipelines/{pipeline_id}/updates', body=body, headers=headers)
+            body["full_refresh_selection"] = [v for v in full_refresh_selection]
+        if refresh_selection is not None:
+            body["refresh_selection"] = [v for v in refresh_selection]
+        if validate_only is not None:
+            body["validate_only"] = validate_only
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/pipelines/{pipeline_id}/updates",
+            body=body,
+            headers=headers,
+        )
         return StartUpdateResponse.from_dict(res)
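+
+    # Illustrative usage (not part of the generated client): trigger a full refresh of two
+    # tables only. Assumes a configured client `w` and that the table names exist in the
+    # pipeline; `start_update` returns immediately with the new update's id.
+    #
+    #   resp = w.pipelines.start_update(
+    #       pipeline_id,
+    #       full_refresh_selection=["sales_orders", "sales_customers"],
+    #   )
+    #   print(resp.update_id)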
 
     def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]:
         """Stop a pipeline.
-        
+
         Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
         request is a no-op.
-        
+
         :param pipeline_id: str
-        
+
         :returns:
           Long-running operation waiter for :class:`GetPipelineResponse`.
           See :method:wait_get_pipeline_idle for more details.
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        op_response = self._api.do('POST', f'/api/2.0/pipelines/{pipeline_id}/stop', headers=headers)
-        return Wait(self.wait_get_pipeline_idle,
-                    response=StopPipelineResponse.from_dict(op_response),
-                    pipeline_id=pipeline_id)
+        op_response = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/stop", headers=headers)
+        return Wait(
+            self.wait_get_pipeline_idle,
+            response=StopPipelineResponse.from_dict(op_response),
+            pipeline_id=pipeline_id,
+        )
 
     def stop_and_wait(self, pipeline_id: str, timeout=timedelta(minutes=20)) -> GetPipelineResponse:
         return self.stop(pipeline_id=pipeline_id).result(timeout=timeout)
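+
+    # Illustrative usage (not part of the generated client): `stop` returns a `Wait` object,
+    # so callers can either block right away or do other work first. Assumes a configured
+    # client `w`.
+    #
+    #   waiter = w.pipelines.stop(pipeline_id)
+    #   # ... other work while the active update is cancelled ...
+    #   pipeline = waiter.result(timeout=timedelta(minutes=5))
+    #
+    #   # or, equivalently, in one call:
+    #   pipeline = w.pipelines.stop_and_wait(pipeline_id, timeout=timedelta(minutes=5))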
 
-    def update(self,
-               pipeline_id: str,
-               *,
-               allow_duplicate_names: Optional[bool] = None,
-               budget_policy_id: Optional[str] = None,
-               catalog: Optional[str] = None,
-               channel: Optional[str] = None,
-               clusters: Optional[List[PipelineCluster]] = None,
-               configuration: Optional[Dict[str, str]] = None,
-               continuous: Optional[bool] = None,
-               deployment: Optional[PipelineDeployment] = None,
-               development: Optional[bool] = None,
-               edition: Optional[str] = None,
-               expected_last_modified: Optional[int] = None,
-               filters: Optional[Filters] = None,
-               gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
-               id: Optional[str] = None,
-               ingestion_definition: Optional[IngestionPipelineDefinition] = None,
-               libraries: Optional[List[PipelineLibrary]] = None,
-               name: Optional[str] = None,
-               notifications: Optional[List[Notifications]] = None,
-               photon: Optional[bool] = None,
-               restart_window: Optional[RestartWindow] = None,
-               run_as: Optional[RunAs] = None,
-               schema: Optional[str] = None,
-               serverless: Optional[bool] = None,
-               storage: Optional[str] = None,
-               target: Optional[str] = None,
-               trigger: Optional[PipelineTrigger] = None):
+    def update(
+        self,
+        pipeline_id: str,
+        *,
+        allow_duplicate_names: Optional[bool] = None,
+        budget_policy_id: Optional[str] = None,
+        catalog: Optional[str] = None,
+        channel: Optional[str] = None,
+        clusters: Optional[List[PipelineCluster]] = None,
+        configuration: Optional[Dict[str, str]] = None,
+        continuous: Optional[bool] = None,
+        deployment: Optional[PipelineDeployment] = None,
+        development: Optional[bool] = None,
+        edition: Optional[str] = None,
+        expected_last_modified: Optional[int] = None,
+        filters: Optional[Filters] = None,
+        gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
+        id: Optional[str] = None,
+        ingestion_definition: Optional[IngestionPipelineDefinition] = None,
+        libraries: Optional[List[PipelineLibrary]] = None,
+        name: Optional[str] = None,
+        notifications: Optional[List[Notifications]] = None,
+        photon: Optional[bool] = None,
+        restart_window: Optional[RestartWindow] = None,
+        run_as: Optional[RunAs] = None,
+        schema: Optional[str] = None,
+        serverless: Optional[bool] = None,
+        storage: Optional[str] = None,
+        target: Optional[str] = None,
+        trigger: Optional[PipelineTrigger] = None,
+    ):
         """Edit a pipeline.
-        
+
         Updates a pipeline with the supplied configuration.
-        
+
         :param pipeline_id: str
           Unique identifier for this pipeline.
         :param allow_duplicate_names: bool (optional)
@@ -3341,7 +4037,7 @@ def update(self,
         :param run_as: :class:`RunAs` (optional)
           Write-only setting, available only in Create/Update calls. Specifies the user or service principal
           that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-          
+
           Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
           thrown.
         :param schema: str (optional)
@@ -3356,62 +4052,102 @@ def update(self,
           to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
         :param trigger: :class:`PipelineTrigger` (optional)
           Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-        
-        
+
+
         """
         body = {}
-        if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
-        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
-        if catalog is not None: body['catalog'] = catalog
-        if channel is not None: body['channel'] = channel
-        if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
-        if configuration is not None: body['configuration'] = configuration
-        if continuous is not None: body['continuous'] = continuous
-        if deployment is not None: body['deployment'] = deployment.as_dict()
-        if development is not None: body['development'] = development
-        if edition is not None: body['edition'] = edition
-        if expected_last_modified is not None: body['expected_last_modified'] = expected_last_modified
-        if filters is not None: body['filters'] = filters.as_dict()
-        if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict()
-        if id is not None: body['id'] = id
-        if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict()
-        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
-        if name is not None: body['name'] = name
-        if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
-        if photon is not None: body['photon'] = photon
-        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
-        if run_as is not None: body['run_as'] = run_as.as_dict()
-        if schema is not None: body['schema'] = schema
-        if serverless is not None: body['serverless'] = serverless
-        if storage is not None: body['storage'] = storage
-        if target is not None: body['target'] = target
-        if trigger is not None: body['trigger'] = trigger.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PUT', f'/api/2.0/pipelines/{pipeline_id}', body=body, headers=headers)
+        if allow_duplicate_names is not None:
+            body["allow_duplicate_names"] = allow_duplicate_names
+        if budget_policy_id is not None:
+            body["budget_policy_id"] = budget_policy_id
+        if catalog is not None:
+            body["catalog"] = catalog
+        if channel is not None:
+            body["channel"] = channel
+        if clusters is not None:
+            body["clusters"] = [v.as_dict() for v in clusters]
+        if configuration is not None:
+            body["configuration"] = configuration
+        if continuous is not None:
+            body["continuous"] = continuous
+        if deployment is not None:
+            body["deployment"] = deployment.as_dict()
+        if development is not None:
+            body["development"] = development
+        if edition is not None:
+            body["edition"] = edition
+        if expected_last_modified is not None:
+            body["expected_last_modified"] = expected_last_modified
+        if filters is not None:
+            body["filters"] = filters.as_dict()
+        if gateway_definition is not None:
+            body["gateway_definition"] = gateway_definition.as_dict()
+        if id is not None:
+            body["id"] = id
+        if ingestion_definition is not None:
+            body["ingestion_definition"] = ingestion_definition.as_dict()
+        if libraries is not None:
+            body["libraries"] = [v.as_dict() for v in libraries]
+        if name is not None:
+            body["name"] = name
+        if notifications is not None:
+            body["notifications"] = [v.as_dict() for v in notifications]
+        if photon is not None:
+            body["photon"] = photon
+        if restart_window is not None:
+            body["restart_window"] = restart_window.as_dict()
+        if run_as is not None:
+            body["run_as"] = run_as.as_dict()
+        if schema is not None:
+            body["schema"] = schema
+        if serverless is not None:
+            body["serverless"] = serverless
+        if storage is not None:
+            body["storage"] = storage
+        if target is not None:
+            body["target"] = target
+        if trigger is not None:
+            body["trigger"] = trigger.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/pipelines/{pipeline_id}",
+            body=body,
+            headers=headers,
+        )
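+
+    # Illustrative usage (not part of the generated client): the endpoint is a PUT of the
+    # supplied configuration, so a common pattern (sketched here, assuming the `get`
+    # response's `spec` carries the current settings) is read-modify-write. Assumes a
+    # configured client `w`.
+    #
+    #   current = w.pipelines.get(pipeline_id).spec
+    #   w.pipelines.update(
+    #       pipeline_id,
+    #       name=current.name,
+    #       catalog=current.catalog,
+    #       libraries=current.libraries,
+    #       configuration={**(current.configuration or {}), "my.flag": "true"},
+    #   )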
 
     def update_permissions(
-            self,
-            pipeline_id: str,
-            *,
-            access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
+        self,
+        pipeline_id: str,
+        *,
+        access_control_list: Optional[List[PipelineAccessControlRequest]] = None,
+    ) -> PipelinePermissions:
         """Update pipeline permissions.
-        
+
         Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
-        
+
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
         :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-        
+
         :returns: :class:`PipelinePermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/pipelines/{pipeline_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/pipelines/{pipeline_id}",
+            body=body,
+            headers=headers,
+        )
         return PipelinePermissions.from_dict(res)
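+
+    # Illustrative usage (not part of the generated client): `update_permissions` issues a
+    # PATCH, which amends the existing grants, whereas `set_permissions` above issues a PUT
+    # that replaces them. Reuses the example `acl` list shown for `set_permissions`.
+    #
+    #   w.pipelines.update_permissions(pipeline_id, access_control_list=acl)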
diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py
index c54120ad8..c09aa5ed7 100755
--- a/databricks/sdk/service/provisioning.py
+++ b/databricks/sdk/service/provisioning.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -25,19 +25,21 @@ class AwsCredentials:
     def as_dict(self) -> dict:
         """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
+        if self.sts_role:
+            body["sts_role"] = self.sts_role.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.sts_role: body['sts_role'] = self.sts_role
+        if self.sts_role:
+            body["sts_role"] = self.sts_role
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
-        return cls(sts_role=_from_dict(d, 'sts_role', StsRole))
+        return cls(sts_role=_from_dict(d, "sts_role", StsRole))
 
 
 @dataclass
@@ -59,30 +61,38 @@ class AwsKeyInfo:
     def as_dict(self) -> dict:
         """Serializes the AwsKeyInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key_alias is not None: body['key_alias'] = self.key_alias
-        if self.key_arn is not None: body['key_arn'] = self.key_arn
-        if self.key_region is not None: body['key_region'] = self.key_region
+        if self.key_alias is not None:
+            body["key_alias"] = self.key_alias
+        if self.key_arn is not None:
+            body["key_arn"] = self.key_arn
+        if self.key_region is not None:
+            body["key_region"] = self.key_region
         if self.reuse_key_for_cluster_volumes is not None:
-            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+            body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key_alias is not None: body['key_alias'] = self.key_alias
-        if self.key_arn is not None: body['key_arn'] = self.key_arn
-        if self.key_region is not None: body['key_region'] = self.key_region
+        if self.key_alias is not None:
+            body["key_alias"] = self.key_alias
+        if self.key_arn is not None:
+            body["key_arn"] = self.key_arn
+        if self.key_region is not None:
+            body["key_region"] = self.key_region
         if self.reuse_key_for_cluster_volumes is not None:
-            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+            body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsKeyInfo:
         """Deserializes the AwsKeyInfo from a dictionary."""
-        return cls(key_alias=d.get('key_alias', None),
-                   key_arn=d.get('key_arn', None),
-                   key_region=d.get('key_region', None),
-                   reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None))
+        return cls(
+            key_alias=d.get("key_alias", None),
+            key_arn=d.get("key_arn", None),
+            key_region=d.get("key_region", None),
+            reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None),
+        )
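+
+# Illustrative usage (not part of the generated file): the as_dict/from_dict pair round-trips
+# a value through the plain-dict form used for JSON request bodies; fields left at None are
+# omitted. The key ARN below is a made-up placeholder.
+#
+#   d = {"key_arn": "arn:aws:kms:us-east-1:000000000000:key/example",
+#        "key_region": "us-east-1"}
+#   info = AwsKeyInfo.from_dict(d)
+#   assert info.as_dict() == d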
 
 
 @dataclass
@@ -96,22 +106,28 @@ class AzureWorkspaceInfo:
     def as_dict(self) -> dict:
         """Serializes the AzureWorkspaceInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.resource_group is not None: body['resource_group'] = self.resource_group
-        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        if self.resource_group is not None:
+            body["resource_group"] = self.resource_group
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.resource_group is not None: body['resource_group'] = self.resource_group
-        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        if self.resource_group is not None:
+            body["resource_group"] = self.resource_group
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureWorkspaceInfo:
         """Deserializes the AzureWorkspaceInfo from a dictionary."""
-        return cls(resource_group=d.get('resource_group', None),
-                   subscription_id=d.get('subscription_id', None))
+        return cls(
+            resource_group=d.get("resource_group", None),
+            subscription_id=d.get("subscription_id", None),
+        )
 
 
 @dataclass
@@ -124,19 +140,21 @@ class CloudResourceContainer:
     def as_dict(self) -> dict:
         """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.gcp: body['gcp'] = self.gcp.as_dict()
+        if self.gcp:
+            body["gcp"] = self.gcp.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.gcp: body['gcp'] = self.gcp
+        if self.gcp:
+            body["gcp"] = self.gcp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudResourceContainer:
         """Deserializes the CloudResourceContainer from a dictionary."""
-        return cls(gcp=_from_dict(d, 'gcp', CustomerFacingGcpCloudResourceContainer))
+        return cls(gcp=_from_dict(d, "gcp", CustomerFacingGcpCloudResourceContainer))
 
 
 @dataclass
@@ -156,27 +174,33 @@ class CreateAwsKeyInfo:
     def as_dict(self) -> dict:
         """Serializes the CreateAwsKeyInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key_alias is not None: body['key_alias'] = self.key_alias
-        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.key_alias is not None:
+            body["key_alias"] = self.key_alias
+        if self.key_arn is not None:
+            body["key_arn"] = self.key_arn
         if self.reuse_key_for_cluster_volumes is not None:
-            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+            body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key_alias is not None: body['key_alias'] = self.key_alias
-        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.key_alias is not None:
+            body["key_alias"] = self.key_alias
+        if self.key_arn is not None:
+            body["key_arn"] = self.key_arn
         if self.reuse_key_for_cluster_volumes is not None:
-            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+            body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAwsKeyInfo:
         """Deserializes the CreateAwsKeyInfo from a dictionary."""
-        return cls(key_alias=d.get('key_alias', None),
-                   key_arn=d.get('key_arn', None),
-                   reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None))
+        return cls(
+            key_alias=d.get("key_alias", None),
+            key_arn=d.get("key_arn", None),
+            reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None),
+        )
 
 
 @dataclass
@@ -186,19 +210,21 @@ class CreateCredentialAwsCredentials:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialAwsCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
+        if self.sts_role:
+            body["sts_role"] = self.sts_role.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.sts_role: body['sts_role'] = self.sts_role
+        if self.sts_role:
+            body["sts_role"] = self.sts_role
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialAwsCredentials:
         """Deserializes the CreateCredentialAwsCredentials from a dictionary."""
-        return cls(sts_role=_from_dict(d, 'sts_role', CreateCredentialStsRole))
+        return cls(sts_role=_from_dict(d, "sts_role", CreateCredentialStsRole))
 
 
 @dataclass
@@ -211,22 +237,28 @@ class CreateCredentialRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict()
-        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        if self.aws_credentials:
+            body["aws_credentials"] = self.aws_credentials.as_dict()
+        if self.credentials_name is not None:
+            body["credentials_name"] = self.credentials_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
-        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        if self.aws_credentials:
+            body["aws_credentials"] = self.aws_credentials
+        if self.credentials_name is not None:
+            body["credentials_name"] = self.credentials_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
-        return cls(aws_credentials=_from_dict(d, 'aws_credentials', CreateCredentialAwsCredentials),
-                   credentials_name=d.get('credentials_name', None))
+        return cls(
+            aws_credentials=_from_dict(d, "aws_credentials", CreateCredentialAwsCredentials),
+            credentials_name=d.get("credentials_name", None),
+        )
 
 
 @dataclass
@@ -237,19 +269,21 @@ class CreateCredentialStsRole:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialStsRole into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialStsRole:
         """Deserializes the CreateCredentialStsRole from a dictionary."""
-        return cls(role_arn=d.get('role_arn', None))
+        return cls(role_arn=d.get("role_arn", None))
 
 
 @dataclass
@@ -264,25 +298,33 @@ class CreateCustomerManagedKeyRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateCustomerManagedKeyRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict()
-        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict()
-        if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
+        if self.aws_key_info:
+            body["aws_key_info"] = self.aws_key_info.as_dict()
+        if self.gcp_key_info:
+            body["gcp_key_info"] = self.gcp_key_info.as_dict()
+        if self.use_cases:
+            body["use_cases"] = [v.value for v in self.use_cases]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
-        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
-        if self.use_cases: body['use_cases'] = self.use_cases
+        if self.aws_key_info:
+            body["aws_key_info"] = self.aws_key_info
+        if self.gcp_key_info:
+            body["gcp_key_info"] = self.gcp_key_info
+        if self.use_cases:
+            body["use_cases"] = self.use_cases
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomerManagedKeyRequest:
         """Deserializes the CreateCustomerManagedKeyRequest from a dictionary."""
-        return cls(aws_key_info=_from_dict(d, 'aws_key_info', CreateAwsKeyInfo),
-                   gcp_key_info=_from_dict(d, 'gcp_key_info', CreateGcpKeyInfo),
-                   use_cases=_repeated_enum(d, 'use_cases', KeyUseCase))
+        return cls(
+            aws_key_info=_from_dict(d, "aws_key_info", CreateAwsKeyInfo),
+            gcp_key_info=_from_dict(d, "gcp_key_info", CreateGcpKeyInfo),
+            use_cases=_repeated_enum(d, "use_cases", KeyUseCase),
+        )
 
 
 @dataclass
@@ -293,19 +335,21 @@ class CreateGcpKeyInfo:
     def as_dict(self) -> dict:
         """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateGcpKeyInfo:
         """Deserializes the CreateGcpKeyInfo from a dictionary."""
-        return cls(kms_key_id=d.get('kms_key_id', None))
+        return cls(kms_key_id=d.get("kms_key_id", None))
 
 
 @dataclass
@@ -338,34 +382,48 @@ class CreateNetworkRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateNetworkRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict()
-        if self.network_name is not None: body['network_name'] = self.network_name
-        if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids]
-        if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids]
-        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict()
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.gcp_network_info:
+            body["gcp_network_info"] = self.gcp_network_info.as_dict()
+        if self.network_name is not None:
+            body["network_name"] = self.network_name
+        if self.security_group_ids:
+            body["security_group_ids"] = [v for v in self.security_group_ids]
+        if self.subnet_ids:
+            body["subnet_ids"] = [v for v in self.subnet_ids]
+        if self.vpc_endpoints:
+            body["vpc_endpoints"] = self.vpc_endpoints.as_dict()
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
-        if self.network_name is not None: body['network_name'] = self.network_name
-        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
-        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
-        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.gcp_network_info:
+            body["gcp_network_info"] = self.gcp_network_info
+        if self.network_name is not None:
+            body["network_name"] = self.network_name
+        if self.security_group_ids:
+            body["security_group_ids"] = self.security_group_ids
+        if self.subnet_ids:
+            body["subnet_ids"] = self.subnet_ids
+        if self.vpc_endpoints:
+            body["vpc_endpoints"] = self.vpc_endpoints
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkRequest:
         """Deserializes the CreateNetworkRequest from a dictionary."""
-        return cls(gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo),
-                   network_name=d.get('network_name', None),
-                   security_group_ids=d.get('security_group_ids', None),
-                   subnet_ids=d.get('subnet_ids', None),
-                   vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints),
-                   vpc_id=d.get('vpc_id', None))
+        return cls(
+            gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo),
+            network_name=d.get("network_name", None),
+            security_group_ids=d.get("security_group_ids", None),
+            subnet_ids=d.get("subnet_ids", None),
+            vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints),
+            vpc_id=d.get("vpc_id", None),
+        )
 
 
 @dataclass
@@ -379,24 +437,28 @@ class CreateStorageConfigurationRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict()
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info.as_dict()
         if self.storage_configuration_name is not None:
-            body['storage_configuration_name'] = self.storage_configuration_name
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info
         if self.storage_configuration_name is not None:
-            body['storage_configuration_name'] = self.storage_configuration_name
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageConfigurationRequest:
         """Deserializes the CreateStorageConfigurationRequest from a dictionary."""
-        return cls(root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo),
-                   storage_configuration_name=d.get('storage_configuration_name', None))
+        return cls(
+            root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo),
+            storage_configuration_name=d.get("storage_configuration_name", None),
+        )
 
 
 @dataclass
@@ -416,28 +478,38 @@ class CreateVpcEndpointRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
-        if self.region is not None: body['region'] = self.region
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
-        if self.region is not None: body['region'] = self.region
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info
+        if self.region is not None:
+            body["region"] = self.region
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVpcEndpointRequest:
         """Deserializes the CreateVpcEndpointRequest from a dictionary."""
-        return cls(aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None),
-                   gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo),
-                   region=d.get('region', None),
-                   vpc_endpoint_name=d.get('vpc_endpoint_name', None))
+        return cls(
+            aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None),
+            gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo),
+            region=d.get("region", None),
+            vpc_endpoint_name=d.get("vpc_endpoint_name", None),
+        )
 
 
 @dataclass
@@ -552,82 +624,103 @@ class CreateWorkspaceRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateWorkspaceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
         if self.cloud_resource_container:
-            body['cloud_resource_container'] = self.cloud_resource_container.as_dict()
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+            body["cloud_resource_container"] = self.cloud_resource_container.as_dict()
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
         if self.gcp_managed_network_config:
-            body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
-        if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict()
+        if self.gke_config:
+            body["gke_config"] = self.gke_config.as_dict()
         if self.is_no_public_ip_enabled is not None:
-            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier.value
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cloud_resource_container:
+            body["cloud_resource_container"] = self.cloud_resource_container
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
         if self.gcp_managed_network_config:
-            body['gcp_managed_network_config'] = self.gcp_managed_network_config
-        if self.gke_config: body['gke_config'] = self.gke_config
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config
+        if self.gke_config:
+            body["gke_config"] = self.gke_config
         if self.is_no_public_ip_enabled is not None:
-            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest:
         """Deserializes the CreateWorkspaceRequest from a dictionary."""
-        return cls(aws_region=d.get('aws_region', None),
-                   cloud=d.get('cloud', None),
-                   cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer),
-                   credentials_id=d.get('credentials_id', None),
-                   custom_tags=d.get('custom_tags', None),
-                   deployment_name=d.get('deployment_name', None),
-                   gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
-                                                         GcpManagedNetworkConfig),
-                   gke_config=_from_dict(d, 'gke_config', GkeConfig),
-                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
-                   location=d.get('location', None),
-                   managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
-                                                                  None),
-                   network_id=d.get('network_id', None),
-                   pricing_tier=_enum(d, 'pricing_tier', PricingTier),
-                   private_access_settings_id=d.get('private_access_settings_id', None),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
-                   workspace_name=d.get('workspace_name', None))
+        return cls(
+            aws_region=d.get("aws_region", None),
+            cloud=d.get("cloud", None),
+            cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer),
+            credentials_id=d.get("credentials_id", None),
+            custom_tags=d.get("custom_tags", None),
+            deployment_name=d.get("deployment_name", None),
+            gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig),
+            gke_config=_from_dict(d, "gke_config", GkeConfig),
+            is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None),
+            location=d.get("location", None),
+            managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network_id=d.get("network_id", None),
+            pricing_tier=_enum(d, "pricing_tier", PricingTier),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            workspace_name=d.get("workspace_name", None),
+        )
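+
+# Illustrative usage (not part of the generated file): `as_dict` recursively serializes nested
+# messages and enum members (the form used for JSON request bodies), while `as_shallow_dict`
+# keeps them as Python objects. Values are placeholders, and this assumes `workspace_name` is
+# the only required constructor argument.
+#
+#   req = CreateWorkspaceRequest(
+#       workspace_name="my-workspace",
+#       aws_region="us-west-2",
+#       pricing_tier=PricingTier.PREMIUM,
+#   )
+#   req.as_dict()["pricing_tier"]          # "PREMIUM" (enum value)
+#   req.as_shallow_dict()["pricing_tier"]  # PricingTier.PREMIUM (enum member)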
 
 
 @dataclass
@@ -649,31 +742,43 @@ class Credential:
     def as_dict(self) -> dict:
         """Serializes the Credential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_credentials:
+            body["aws_credentials"] = self.aws_credentials.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.credentials_name is not None:
+            body["credentials_name"] = self.credentials_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Credential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_credentials:
+            body["aws_credentials"] = self.aws_credentials
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.credentials_name is not None:
+            body["credentials_name"] = self.credentials_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Credential:
         """Deserializes the Credential from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   aws_credentials=_from_dict(d, 'aws_credentials', AwsCredentials),
-                   creation_time=d.get('creation_time', None),
-                   credentials_id=d.get('credentials_id', None),
-                   credentials_name=d.get('credentials_name', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_credentials=_from_dict(d, "aws_credentials", AwsCredentials),
+            creation_time=d.get("creation_time", None),
+            credentials_id=d.get("credentials_id", None),
+            credentials_name=d.get("credentials_name", None),
+        )
 
 
 CustomTags = Dict[str, str]
@@ -690,19 +795,21 @@ class CustomerFacingGcpCloudResourceContainer:
     def as_dict(self) -> dict:
         """Serializes the CustomerFacingGcpCloudResourceContainer into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerFacingGcpCloudResourceContainer:
         """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary."""
-        return cls(project_id=d.get('project_id', None))
+        return cls(project_id=d.get("project_id", None))
 
 
 @dataclass
@@ -726,36 +833,48 @@ class CustomerManagedKey:
     def as_dict(self) -> dict:
         """Serializes the CustomerManagedKey into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_key_info:
+            body["aws_key_info"] = self.aws_key_info.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
         if self.customer_managed_key_id is not None:
-            body['customer_managed_key_id'] = self.customer_managed_key_id
-        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict()
-        if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
+            body["customer_managed_key_id"] = self.customer_managed_key_id
+        if self.gcp_key_info:
+            body["gcp_key_info"] = self.gcp_key_info.as_dict()
+        if self.use_cases:
+            body["use_cases"] = [v.value for v in self.use_cases]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_key_info:
+            body["aws_key_info"] = self.aws_key_info
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
         if self.customer_managed_key_id is not None:
-            body['customer_managed_key_id'] = self.customer_managed_key_id
-        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
-        if self.use_cases: body['use_cases'] = self.use_cases
+            body["customer_managed_key_id"] = self.customer_managed_key_id
+        if self.gcp_key_info:
+            body["gcp_key_info"] = self.gcp_key_info
+        if self.use_cases:
+            body["use_cases"] = self.use_cases
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerManagedKey:
         """Deserializes the CustomerManagedKey from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   aws_key_info=_from_dict(d, 'aws_key_info', AwsKeyInfo),
-                   creation_time=d.get('creation_time', None),
-                   customer_managed_key_id=d.get('customer_managed_key_id', None),
-                   gcp_key_info=_from_dict(d, 'gcp_key_info', GcpKeyInfo),
-                   use_cases=_repeated_enum(d, 'use_cases', KeyUseCase))
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_key_info=_from_dict(d, "aws_key_info", AwsKeyInfo),
+            creation_time=d.get("creation_time", None),
+            customer_managed_key_id=d.get("customer_managed_key_id", None),
+            gcp_key_info=_from_dict(d, "gcp_key_info", GcpKeyInfo),
+            use_cases=_repeated_enum(d, "use_cases", KeyUseCase),
+        )
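
# A minimal sketch of the as_dict vs. as_shallow_dict split formatted above:
# as_dict flattens enum members to their string values, as_shallow_dict keeps
# the Python objects. Values are placeholders; unset fields are assumed to
# default to None, matching from_dict.
from databricks.sdk.service.provisioning import CustomerManagedKey, KeyUseCase

cmk = CustomerManagedKey(
    customer_managed_key_id="cmk-1",
    use_cases=[KeyUseCase.STORAGE],
)
print(cmk.as_dict())          # {'customer_managed_key_id': 'cmk-1', 'use_cases': ['STORAGE']}
print(cmk.as_shallow_dict())  # same keys, but use_cases holds KeyUseCase members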
 
 
 @dataclass
@@ -780,22 +899,23 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
 class EndpointUseCase(Enum):
     """This enumeration represents the type of Databricks VPC [endpoint service] that was used when
     creating this VPC endpoint.
-    
-    [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
 
-    DATAPLANE_RELAY_ACCESS = 'DATAPLANE_RELAY_ACCESS'
-    WORKSPACE_ACCESS = 'WORKSPACE_ACCESS'
+    [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html
+    """
+
+    DATAPLANE_RELAY_ACCESS = "DATAPLANE_RELAY_ACCESS"
+    WORKSPACE_ACCESS = "WORKSPACE_ACCESS"
 
 
 class ErrorType(Enum):
     """The AWS resource associated with this error: credentials, VPC, subnet, security group, or
     network ACL."""
 
-    CREDENTIALS = 'credentials'
-    NETWORK_ACL = 'networkAcl'
-    SECURITY_GROUP = 'securityGroup'
-    SUBNET = 'subnet'
-    VPC = 'vpc'
+    CREDENTIALS = "credentials"
+    NETWORK_ACL = "networkAcl"
+    SECURITY_GROUP = "securityGroup"
+    SUBNET = "subnet"
+    VPC = "vpc"
 
 
 @dataclass
@@ -813,28 +933,32 @@ def as_dict(self) -> dict:
         """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.authoritative_user_email is not None:
-            body['authoritative_user_email'] = self.authoritative_user_email
+            body["authoritative_user_email"] = self.authoritative_user_email
         if self.authoritative_user_full_name is not None:
-            body['authoritative_user_full_name'] = self.authoritative_user_full_name
-        if self.customer_name is not None: body['customer_name'] = self.customer_name
+            body["authoritative_user_full_name"] = self.authoritative_user_full_name
+        if self.customer_name is not None:
+            body["customer_name"] = self.customer_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.authoritative_user_email is not None:
-            body['authoritative_user_email'] = self.authoritative_user_email
+            body["authoritative_user_email"] = self.authoritative_user_email
         if self.authoritative_user_full_name is not None:
-            body['authoritative_user_full_name'] = self.authoritative_user_full_name
-        if self.customer_name is not None: body['customer_name'] = self.customer_name
+            body["authoritative_user_full_name"] = self.authoritative_user_full_name
+        if self.customer_name is not None:
+            body["customer_name"] = self.customer_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo:
         """Deserializes the ExternalCustomerInfo from a dictionary."""
-        return cls(authoritative_user_email=d.get('authoritative_user_email', None),
-                   authoritative_user_full_name=d.get('authoritative_user_full_name', None),
-                   customer_name=d.get('customer_name', None))
+        return cls(
+            authoritative_user_email=d.get("authoritative_user_email", None),
+            authoritative_user_full_name=d.get("authoritative_user_full_name", None),
+            customer_name=d.get("customer_name", None),
+        )
 
 
 @dataclass
@@ -845,19 +969,21 @@ class GcpKeyInfo:
     def as_dict(self) -> dict:
         """Serializes the GcpKeyInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpKeyInfo:
         """Deserializes the GcpKeyInfo from a dictionary."""
-        return cls(kms_key_id=d.get('kms_key_id', None))
+        return cls(kms_key_id=d.get("kms_key_id", None))
 
 
 @dataclass
@@ -866,20 +992,21 @@ class GcpManagedNetworkConfig:
     It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP
     range configurations must be mutually exclusive. An attempt to create a workspace fails if
     Databricks detects an IP range overlap.
-    
+
     Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
     all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
     `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-    
+
     The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-    
+
     **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
     workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
     your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
     determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
     Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-    
-    [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
+
+    [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
+    """
 
     gke_cluster_pod_ip_range: Optional[str] = None
     """The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than
@@ -897,28 +1024,32 @@ def as_dict(self) -> dict:
         """Serializes the GcpManagedNetworkConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.gke_cluster_pod_ip_range is not None:
-            body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range
+            body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range
         if self.gke_cluster_service_ip_range is not None:
-            body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range
-        if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
+            body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range
+        if self.subnet_cidr is not None:
+            body["subnet_cidr"] = self.subnet_cidr
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.gke_cluster_pod_ip_range is not None:
-            body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range
+            body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range
         if self.gke_cluster_service_ip_range is not None:
-            body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range
-        if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
+            body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range
+        if self.subnet_cidr is not None:
+            body["subnet_cidr"] = self.subnet_cidr
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpManagedNetworkConfig:
         """Deserializes the GcpManagedNetworkConfig from a dictionary."""
-        return cls(gke_cluster_pod_ip_range=d.get('gke_cluster_pod_ip_range', None),
-                   gke_cluster_service_ip_range=d.get('gke_cluster_service_ip_range', None),
-                   subnet_cidr=d.get('subnet_cidr', None))
+        return cls(
+            gke_cluster_pod_ip_range=d.get("gke_cluster_pod_ip_range", None),
+            gke_cluster_service_ip_range=d.get("gke_cluster_service_ip_range", None),
+            subnet_cidr=d.get("subnet_cidr", None),
+        )
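
# A small sketch of the CIDR rules in the docstring above: the three ranges
# are mutually exclusive and all sit inside an allowed private block
# (10.0.0.0/8 here). The specific ranges are illustrative only.
from databricks.sdk.service.provisioning import GcpManagedNetworkConfig

network_config = GcpManagedNetworkConfig(
    subnet_cidr="10.0.0.0/16",                   # node subnet
    gke_cluster_pod_ip_range="10.4.0.0/14",      # pods; disjoint from the subnet
    gke_cluster_service_ip_range="10.8.0.0/20",  # services; disjoint from both
)
print(network_config.as_dict())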
 
 
 @dataclass
@@ -950,34 +1081,48 @@ class GcpNetworkInfo:
     def as_dict(self) -> dict:
         """Serializes the GcpNetworkInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.network_project_id is not None: body['network_project_id'] = self.network_project_id
-        if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name
-        if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name
-        if self.subnet_id is not None: body['subnet_id'] = self.subnet_id
-        if self.subnet_region is not None: body['subnet_region'] = self.subnet_region
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.network_project_id is not None:
+            body["network_project_id"] = self.network_project_id
+        if self.pod_ip_range_name is not None:
+            body["pod_ip_range_name"] = self.pod_ip_range_name
+        if self.service_ip_range_name is not None:
+            body["service_ip_range_name"] = self.service_ip_range_name
+        if self.subnet_id is not None:
+            body["subnet_id"] = self.subnet_id
+        if self.subnet_region is not None:
+            body["subnet_region"] = self.subnet_region
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.network_project_id is not None: body['network_project_id'] = self.network_project_id
-        if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name
-        if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name
-        if self.subnet_id is not None: body['subnet_id'] = self.subnet_id
-        if self.subnet_region is not None: body['subnet_region'] = self.subnet_region
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.network_project_id is not None:
+            body["network_project_id"] = self.network_project_id
+        if self.pod_ip_range_name is not None:
+            body["pod_ip_range_name"] = self.pod_ip_range_name
+        if self.service_ip_range_name is not None:
+            body["service_ip_range_name"] = self.service_ip_range_name
+        if self.subnet_id is not None:
+            body["subnet_id"] = self.subnet_id
+        if self.subnet_region is not None:
+            body["subnet_region"] = self.subnet_region
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpNetworkInfo:
         """Deserializes the GcpNetworkInfo from a dictionary."""
-        return cls(network_project_id=d.get('network_project_id', None),
-                   pod_ip_range_name=d.get('pod_ip_range_name', None),
-                   service_ip_range_name=d.get('service_ip_range_name', None),
-                   subnet_id=d.get('subnet_id', None),
-                   subnet_region=d.get('subnet_region', None),
-                   vpc_id=d.get('vpc_id', None))
+        return cls(
+            network_project_id=d.get("network_project_id", None),
+            pod_ip_range_name=d.get("pod_ip_range_name", None),
+            service_ip_range_name=d.get("service_ip_range_name", None),
+            subnet_id=d.get("subnet_id", None),
+            subnet_region=d.get("subnet_region", None),
+            vpc_id=d.get("vpc_id", None),
+        )
 
 
 @dataclass
@@ -1002,31 +1147,43 @@ class GcpVpcEndpointInfo:
     def as_dict(self) -> dict:
         """Serializes the GcpVpcEndpointInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region
-        if self.project_id is not None: body['project_id'] = self.project_id
-        if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id
-        if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name
-        if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
+        if self.endpoint_region is not None:
+            body["endpoint_region"] = self.endpoint_region
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.psc_connection_id is not None:
+            body["psc_connection_id"] = self.psc_connection_id
+        if self.psc_endpoint_name is not None:
+            body["psc_endpoint_name"] = self.psc_endpoint_name
+        if self.service_attachment_id is not None:
+            body["service_attachment_id"] = self.service_attachment_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region
-        if self.project_id is not None: body['project_id'] = self.project_id
-        if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id
-        if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name
-        if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
+        if self.endpoint_region is not None:
+            body["endpoint_region"] = self.endpoint_region
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.psc_connection_id is not None:
+            body["psc_connection_id"] = self.psc_connection_id
+        if self.psc_endpoint_name is not None:
+            body["psc_endpoint_name"] = self.psc_endpoint_name
+        if self.service_attachment_id is not None:
+            body["service_attachment_id"] = self.service_attachment_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpVpcEndpointInfo:
         """Deserializes the GcpVpcEndpointInfo from a dictionary."""
-        return cls(endpoint_region=d.get('endpoint_region', None),
-                   project_id=d.get('project_id', None),
-                   psc_connection_id=d.get('psc_connection_id', None),
-                   psc_endpoint_name=d.get('psc_endpoint_name', None),
-                   service_attachment_id=d.get('service_attachment_id', None))
+        return cls(
+            endpoint_region=d.get("endpoint_region", None),
+            project_id=d.get("project_id", None),
+            psc_connection_id=d.get("psc_connection_id", None),
+            psc_endpoint_name=d.get("psc_endpoint_name", None),
+            service_attachment_id=d.get("service_attachment_id", None),
+        )
 
 
 @dataclass
@@ -1051,35 +1208,41 @@ class GkeConfig:
     def as_dict(self) -> dict:
         """Serializes the GkeConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type.value
-        if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
+        if self.connectivity_type is not None:
+            body["connectivity_type"] = self.connectivity_type.value
+        if self.master_ip_range is not None:
+            body["master_ip_range"] = self.master_ip_range
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GkeConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type
-        if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
+        if self.connectivity_type is not None:
+            body["connectivity_type"] = self.connectivity_type
+        if self.master_ip_range is not None:
+            body["master_ip_range"] = self.master_ip_range
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GkeConfig:
         """Deserializes the GkeConfig from a dictionary."""
-        return cls(connectivity_type=_enum(d, 'connectivity_type', GkeConfigConnectivityType),
-                   master_ip_range=d.get('master_ip_range', None))
+        return cls(
+            connectivity_type=_enum(d, "connectivity_type", GkeConfigConnectivityType),
+            master_ip_range=d.get("master_ip_range", None),
+        )
 
 
 class GkeConfigConnectivityType(Enum):
     """Specifies the network connectivity types for the GKE nodes and the GKE master network.
-    
+
     Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes
     will not have public IPs.
-    
+
     Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster
     have public IP addresses."""
 
-    PRIVATE_NODE_PUBLIC_MASTER = 'PRIVATE_NODE_PUBLIC_MASTER'
-    PUBLIC_NODE_PUBLIC_MASTER = 'PUBLIC_NODE_PUBLIC_MASTER'
+    PRIVATE_NODE_PUBLIC_MASTER = "PRIVATE_NODE_PUBLIC_MASTER"
+    PUBLIC_NODE_PUBLIC_MASTER = "PUBLIC_NODE_PUBLIC_MASTER"
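
# A short sketch combining GkeConfig with the connectivity enum above; the
# /28 master range is an illustrative placeholder.
from databricks.sdk.service.provisioning import GkeConfig, GkeConfigConnectivityType

gke = GkeConfig(
    connectivity_type=GkeConfigConnectivityType.PRIVATE_NODE_PUBLIC_MASTER,
    master_ip_range="10.3.0.0/28",
)
print(gke.as_dict())  # connectivity_type is emitted as its string value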
 
 
 class KeyUseCase(Enum):
@@ -1087,8 +1250,8 @@ class KeyUseCase(Enum):
     plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and,
     optionally, cluster EBS volumes."""
 
-    MANAGED_SERVICES = 'MANAGED_SERVICES'
-    STORAGE = 'STORAGE'
+    MANAGED_SERVICES = "MANAGED_SERVICES"
+    STORAGE = "STORAGE"
 
 
 @dataclass
@@ -1139,55 +1302,83 @@ class Network:
     def as_dict(self) -> dict:
         """Serializes the Network into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.error_messages: body['error_messages'] = [v.as_dict() for v in self.error_messages]
-        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict()
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.network_name is not None: body['network_name'] = self.network_name
-        if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids]
-        if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids]
-        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict()
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
-        if self.vpc_status is not None: body['vpc_status'] = self.vpc_status.value
-        if self.warning_messages: body['warning_messages'] = [v.as_dict() for v in self.warning_messages]
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.error_messages:
+            body["error_messages"] = [v.as_dict() for v in self.error_messages]
+        if self.gcp_network_info:
+            body["gcp_network_info"] = self.gcp_network_info.as_dict()
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.network_name is not None:
+            body["network_name"] = self.network_name
+        if self.security_group_ids:
+            body["security_group_ids"] = [v for v in self.security_group_ids]
+        if self.subnet_ids:
+            body["subnet_ids"] = [v for v in self.subnet_ids]
+        if self.vpc_endpoints:
+            body["vpc_endpoints"] = self.vpc_endpoints.as_dict()
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
+        if self.vpc_status is not None:
+            body["vpc_status"] = self.vpc_status.value
+        if self.warning_messages:
+            body["warning_messages"] = [v.as_dict() for v in self.warning_messages]
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Network into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.error_messages: body['error_messages'] = self.error_messages
-        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.network_name is not None: body['network_name'] = self.network_name
-        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
-        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
-        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
-        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
-        if self.vpc_status is not None: body['vpc_status'] = self.vpc_status
-        if self.warning_messages: body['warning_messages'] = self.warning_messages
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.error_messages:
+            body["error_messages"] = self.error_messages
+        if self.gcp_network_info:
+            body["gcp_network_info"] = self.gcp_network_info
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.network_name is not None:
+            body["network_name"] = self.network_name
+        if self.security_group_ids:
+            body["security_group_ids"] = self.security_group_ids
+        if self.subnet_ids:
+            body["subnet_ids"] = self.subnet_ids
+        if self.vpc_endpoints:
+            body["vpc_endpoints"] = self.vpc_endpoints
+        if self.vpc_id is not None:
+            body["vpc_id"] = self.vpc_id
+        if self.vpc_status is not None:
+            body["vpc_status"] = self.vpc_status
+        if self.warning_messages:
+            body["warning_messages"] = self.warning_messages
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Network:
         """Deserializes the Network from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   creation_time=d.get('creation_time', None),
-                   error_messages=_repeated_dict(d, 'error_messages', NetworkHealth),
-                   gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo),
-                   network_id=d.get('network_id', None),
-                   network_name=d.get('network_name', None),
-                   security_group_ids=d.get('security_group_ids', None),
-                   subnet_ids=d.get('subnet_ids', None),
-                   vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints),
-                   vpc_id=d.get('vpc_id', None),
-                   vpc_status=_enum(d, 'vpc_status', VpcStatus),
-                   warning_messages=_repeated_dict(d, 'warning_messages', NetworkWarning),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            creation_time=d.get("creation_time", None),
+            error_messages=_repeated_dict(d, "error_messages", NetworkHealth),
+            gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo),
+            network_id=d.get("network_id", None),
+            network_name=d.get("network_name", None),
+            security_group_ids=d.get("security_group_ids", None),
+            subnet_ids=d.get("subnet_ids", None),
+            vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints),
+            vpc_id=d.get("vpc_id", None),
+            vpc_status=_enum(d, "vpc_status", VpcStatus),
+            warning_messages=_repeated_dict(d, "warning_messages", NetworkWarning),
+            workspace_id=d.get("workspace_id", None),
+        )
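
# A minimal sketch of nested deserialization via the helpers used above
# (_from_dict, _repeated_dict, _enum): nested dicts come back as typed
# objects. The ids are placeholders.
from databricks.sdk.service.provisioning import Network

net = Network.from_dict(
    {
        "network_id": "net-1",
        "security_group_ids": ["sg-0123456789abcdef0"],
        "gcp_network_info": {"network_project_id": "my-gcp-project"},
    }
)
print(type(net.gcp_network_info).__name__)  # GcpNetworkInfo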
 
 
 @dataclass
@@ -1202,28 +1393,35 @@ class NetworkHealth:
     def as_dict(self) -> dict:
         """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.error_type is not None: body['error_type'] = self.error_type.value
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.error_type is not None:
+            body["error_type"] = self.error_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.error_type is not None: body['error_type'] = self.error_type
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.error_type is not None:
+            body["error_type"] = self.error_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkHealth:
         """Deserializes the NetworkHealth from a dictionary."""
-        return cls(error_message=d.get('error_message', None), error_type=_enum(d, 'error_type', ErrorType))
+        return cls(
+            error_message=d.get("error_message", None),
+            error_type=_enum(d, "error_type", ErrorType),
+        )
 
 
 @dataclass
 class NetworkVpcEndpoints:
     """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over
     [AWS PrivateLink].
-    
+
     [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
 
     rest_api: List[str]
@@ -1236,21 +1434,28 @@ class NetworkVpcEndpoints:
     def as_dict(self) -> dict:
         """Serializes the NetworkVpcEndpoints into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataplane_relay: body['dataplane_relay'] = [v for v in self.dataplane_relay]
-        if self.rest_api: body['rest_api'] = [v for v in self.rest_api]
+        if self.dataplane_relay:
+            body["dataplane_relay"] = [v for v in self.dataplane_relay]
+        if self.rest_api:
+            body["rest_api"] = [v for v in self.rest_api]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay
-        if self.rest_api: body['rest_api'] = self.rest_api
+        if self.dataplane_relay:
+            body["dataplane_relay"] = self.dataplane_relay
+        if self.rest_api:
+            body["rest_api"] = self.rest_api
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkVpcEndpoints:
         """Deserializes the NetworkVpcEndpoints from a dictionary."""
-        return cls(dataplane_relay=d.get('dataplane_relay', None), rest_api=d.get('rest_api', None))
+        return cls(
+            dataplane_relay=d.get("dataplane_relay", None),
+            rest_api=d.get("rest_api", None),
+        )
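
# A minimal sketch for the PrivateLink endpoint container above; both lists
# hold VPC endpoint ids, shown here as placeholders.
from databricks.sdk.service.provisioning import NetworkVpcEndpoints

endpoints = NetworkVpcEndpoints(
    rest_api=["vpce-rest-0a1b2c3d"],
    dataplane_relay=["vpce-relay-0a1b2c3d"],
)
print(endpoints.as_dict())  # {'dataplane_relay': [...], 'rest_api': [...]}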
 
 
 @dataclass
@@ -1264,35 +1469,41 @@ class NetworkWarning:
     def as_dict(self) -> dict:
         """Serializes the NetworkWarning into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.warning_message is not None: body['warning_message'] = self.warning_message
-        if self.warning_type is not None: body['warning_type'] = self.warning_type.value
+        if self.warning_message is not None:
+            body["warning_message"] = self.warning_message
+        if self.warning_type is not None:
+            body["warning_type"] = self.warning_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkWarning into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.warning_message is not None: body['warning_message'] = self.warning_message
-        if self.warning_type is not None: body['warning_type'] = self.warning_type
+        if self.warning_message is not None:
+            body["warning_message"] = self.warning_message
+        if self.warning_type is not None:
+            body["warning_type"] = self.warning_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkWarning:
         """Deserializes the NetworkWarning from a dictionary."""
-        return cls(warning_message=d.get('warning_message', None),
-                   warning_type=_enum(d, 'warning_type', WarningType))
+        return cls(
+            warning_message=d.get("warning_message", None),
+            warning_type=_enum(d, "warning_type", WarningType),
+        )
 
 
 class PricingTier(Enum):
     """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-    
+
     [AWS Pricing]: https://databricks.com/product/aws-pricing"""
 
-    COMMUNITY_EDITION = 'COMMUNITY_EDITION'
-    DEDICATED = 'DEDICATED'
-    ENTERPRISE = 'ENTERPRISE'
-    PREMIUM = 'PREMIUM'
-    STANDARD = 'STANDARD'
-    UNKNOWN = 'UNKNOWN'
+    COMMUNITY_EDITION = "COMMUNITY_EDITION"
+    DEDICATED = "DEDICATED"
+    ENTERPRISE = "ENTERPRISE"
+    PREMIUM = "PREMIUM"
+    STANDARD = "STANDARD"
+    UNKNOWN = "UNKNOWN"
 
 
 class PrivateAccessLevel(Enum):
@@ -1302,8 +1513,8 @@ class PrivateAccessLevel(Enum):
     your workspace. * `ENDPOINT` level access allows only specified VPC endpoints to connect to your
     workspace. For details, see `allowed_vpc_endpoint_ids`."""
 
-    ACCOUNT = 'ACCOUNT'
-    ENDPOINT = 'ENDPOINT'
+    ACCOUNT = "ACCOUNT"
+    ENDPOINT = "ENDPOINT"
 
 
 @dataclass
@@ -1339,43 +1550,53 @@ class PrivateAccessSettings:
     def as_dict(self) -> dict:
         """Serializes the PrivateAccessSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
         if self.allowed_vpc_endpoint_ids:
-            body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids]
+            body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids]
         if self.private_access_level is not None:
-            body['private_access_level'] = self.private_access_level.value
+            body["private_access_level"] = self.private_access_level.value
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.private_access_settings_name is not None:
-            body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.private_access_settings_name is not None:
-            body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivateAccessSettings:
         """Deserializes the PrivateAccessSettings from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None),
-                   private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel),
-                   private_access_settings_id=d.get('private_access_settings_id', None),
-                   private_access_settings_name=d.get('private_access_settings_name', None),
-                   public_access_enabled=d.get('public_access_enabled', None),
-                   region=d.get('region', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None),
+            private_access_level=_enum(d, "private_access_level", PrivateAccessLevel),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            private_access_settings_name=d.get("private_access_settings_name", None),
+            public_access_enabled=d.get("public_access_enabled", None),
+            region=d.get("region", None),
+        )
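
# A minimal sketch tying the PrivateAccessLevel enum to the settings object
# serialized above: ENDPOINT-level access limits connectivity to the listed
# endpoint ids. Values are placeholders; unset fields are assumed optional,
# matching from_dict.
from databricks.sdk.service.provisioning import PrivateAccessLevel, PrivateAccessSettings

pas = PrivateAccessSettings(
    private_access_settings_name="pl-settings",
    private_access_level=PrivateAccessLevel.ENDPOINT,
    allowed_vpc_endpoint_ids=["dbx-vpce-1"],
    public_access_enabled=False,
    region="us-west-2",
)
print(pas.as_dict())  # private_access_level is emitted as "ENDPOINT"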
 
 
 @dataclass
@@ -1407,19 +1628,21 @@ class RootBucketInfo:
     def as_dict(self) -> dict:
         """Serializes the RootBucketInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RootBucketInfo:
         """Deserializes the RootBucketInfo from a dictionary."""
-        return cls(bucket_name=d.get('bucket_name', None))
+        return cls(bucket_name=d.get("bucket_name", None))
 
 
 @dataclass
@@ -1442,35 +1665,43 @@ class StorageConfiguration:
     def as_dict(self) -> dict:
         """Serializes the StorageConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict()
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info.as_dict()
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_configuration_name is not None:
-            body['storage_configuration_name'] = self.storage_configuration_name
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_configuration_name is not None:
-            body['storage_configuration_name'] = self.storage_configuration_name
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageConfiguration:
         """Deserializes the StorageConfiguration from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   creation_time=d.get('creation_time', None),
-                   root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   storage_configuration_name=d.get('storage_configuration_name', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            creation_time=d.get("creation_time", None),
+            root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_configuration_name=d.get("storage_configuration_name", None),
+        )
 
 
 @dataclass
@@ -1485,21 +1716,28 @@ class StsRole:
     def as_dict(self) -> dict:
         """Serializes the StsRole into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StsRole into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StsRole:
         """Deserializes the StsRole from a dictionary."""
-        return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None))
+        return cls(
+            external_id=d.get("external_id", None),
+            role_arn=d.get("role_arn", None),
+        )
 
 
 @dataclass
@@ -1565,57 +1803,68 @@ class UpdateWorkspaceRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.network_id is not None: body['network_id'] = self.network_id
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.network_id is not None: body['network_id'] = self.network_id
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest:
         """Deserializes the UpdateWorkspaceRequest from a dictionary."""
-        return cls(aws_region=d.get('aws_region', None),
-                   credentials_id=d.get('credentials_id', None),
-                   custom_tags=d.get('custom_tags', None),
-                   managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
-                                                                  None),
-                   network_connectivity_config_id=d.get('network_connectivity_config_id', None),
-                   network_id=d.get('network_id', None),
-                   private_access_settings_id=d.get('private_access_settings_id', None),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            aws_region=d.get("aws_region", None),
+            credentials_id=d.get("credentials_id", None),
+            custom_tags=d.get("custom_tags", None),
+            managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
+            network_id=d.get("network_id", None),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
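
# A minimal sketch of building a sparse update body with the helpers above:
# only the fields being changed are set, so as_dict omits everything else.
# The ids and tags are placeholders; unset fields are assumed optional,
# matching from_dict.
from databricks.sdk.service.provisioning import UpdateWorkspaceRequest

req = UpdateWorkspaceRequest(
    workspace_id=1234567890,
    credentials_id="new-credentials-id",
    custom_tags={"team": "data-platform"},
)
print(req.as_dict())  # only workspace_id, credentials_id and custom_tags appear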
 
 
 @dataclass
@@ -1660,39 +1909,47 @@ def as_dict(self) -> dict:
         """Serializes the UpsertPrivateAccessSettingsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allowed_vpc_endpoint_ids:
-            body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids]
+            body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids]
         if self.private_access_level is not None:
-            body['private_access_level'] = self.private_access_level.value
+            body["private_access_level"] = self.private_access_level.value
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.private_access_settings_name is not None:
-            body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.private_access_settings_name is not None:
-            body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertPrivateAccessSettingsRequest:
         """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary."""
-        return cls(allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None),
-                   private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel),
-                   private_access_settings_id=d.get('private_access_settings_id', None),
-                   private_access_settings_name=d.get('private_access_settings_name', None),
-                   public_access_enabled=d.get('public_access_enabled', None),
-                   region=d.get('region', None))
+        return cls(
+            allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None),
+            private_access_level=_enum(d, "private_access_level", PrivateAccessLevel),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            private_access_settings_name=d.get("private_access_settings_name", None),
+            public_access_enabled=d.get("public_access_enabled", None),
+            region=d.get("region", None),
+        )
 
 
 @dataclass
@@ -1742,65 +1999,86 @@ class VpcEndpoint:
     def as_dict(self) -> dict:
         """Serializes the VpcEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_account_id is not None:
+            body["aws_account_id"] = self.aws_account_id
         if self.aws_endpoint_service_id is not None:
-            body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
-        if self.region is not None: body['region'] = self.region
-        if self.state is not None: body['state'] = self.state
-        if self.use_case is not None: body['use_case'] = self.use_case.value
-        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+            body["aws_endpoint_service_id"] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
+        if self.state is not None:
+            body["state"] = self.state
+        if self.use_case is not None:
+            body["use_case"] = self.use_case.value
+        if self.vpc_endpoint_id is not None:
+            body["vpc_endpoint_id"] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_account_id is not None:
+            body["aws_account_id"] = self.aws_account_id
         if self.aws_endpoint_service_id is not None:
-            body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
-        if self.region is not None: body['region'] = self.region
-        if self.state is not None: body['state'] = self.state
-        if self.use_case is not None: body['use_case'] = self.use_case
-        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+            body["aws_endpoint_service_id"] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info
+        if self.region is not None:
+            body["region"] = self.region
+        if self.state is not None:
+            body["state"] = self.state
+        if self.use_case is not None:
+            body["use_case"] = self.use_case
+        if self.vpc_endpoint_id is not None:
+            body["vpc_endpoint_id"] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VpcEndpoint:
         """Deserializes the VpcEndpoint from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   aws_account_id=d.get('aws_account_id', None),
-                   aws_endpoint_service_id=d.get('aws_endpoint_service_id', None),
-                   aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None),
-                   gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo),
-                   region=d.get('region', None),
-                   state=d.get('state', None),
-                   use_case=_enum(d, 'use_case', EndpointUseCase),
-                   vpc_endpoint_id=d.get('vpc_endpoint_id', None),
-                   vpc_endpoint_name=d.get('vpc_endpoint_name', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_account_id=d.get("aws_account_id", None),
+            aws_endpoint_service_id=d.get("aws_endpoint_service_id", None),
+            aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None),
+            gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo),
+            region=d.get("region", None),
+            state=d.get("state", None),
+            use_case=_enum(d, "use_case", EndpointUseCase),
+            vpc_endpoint_id=d.get("vpc_endpoint_id", None),
+            vpc_endpoint_name=d.get("vpc_endpoint_name", None),
+        )
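
The generated `as_dict`/`from_dict` pair above is symmetric: `from_dict` tolerates missing keys (the corresponding fields simply stay `None`) and `as_dict` emits only the fields that are set. A minimal round-trip sketch, assuming this class is importable from `databricks.sdk.service.provisioning` as elsewhere in this patch; the endpoint name and IDs are placeholders:

from databricks.sdk.service.provisioning import VpcEndpoint

# Build a VpcEndpoint from a partial payload; absent keys simply stay None.
ve = VpcEndpoint.from_dict(
    {
        "vpc_endpoint_name": "example-endpoint",  # placeholder
        "aws_vpc_endpoint_id": "vpce-0123456789abcdef0",  # placeholder
        "region": "us-west-2",
    }
)
assert ve.account_id is None  # never present in the payload

# as_dict() emits only the fields that are set, so the round trip is lossless.
assert ve.as_dict() == {
    "vpc_endpoint_name": "example-endpoint",
    "aws_vpc_endpoint_id": "vpce-0123456789abcdef0",
    "region": "us-west-2",
}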
 
 
 class VpcStatus(Enum):
     """The status of this network configuration object in terms of its use in a workspace: *
-    `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned."""
+    `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.
+    """
 
-    BROKEN = 'BROKEN'
-    UNATTACHED = 'UNATTACHED'
-    VALID = 'VALID'
-    WARNED = 'WARNED'
+    BROKEN = "BROKEN"
+    UNATTACHED = "UNATTACHED"
+    VALID = "VALID"
+    WARNED = "WARNED"
 
 
 class WarningType(Enum):
     """The AWS resource associated with this warning: a subnet or a security group."""
 
-    SECURITY_GROUP = 'securityGroup'
-    SUBNET = 'subnet'
+    SECURITY_GROUP = "securityGroup"
+    SUBNET = "subnet"
 
 
 @dataclass
@@ -1914,117 +2192,150 @@ class Workspace:
     def as_dict(self) -> dict:
         """Serializes the Workspace into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info.as_dict()
-        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.azure_workspace_info:
+            body["azure_workspace_info"] = self.azure_workspace_info.as_dict()
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
         if self.cloud_resource_container:
-            body['cloud_resource_container'] = self.cloud_resource_container.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
-        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict()
+            body["cloud_resource_container"] = self.cloud_resource_container.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
+        if self.external_customer_info:
+            body["external_customer_info"] = self.external_customer_info.as_dict()
         if self.gcp_managed_network_config:
-            body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
-        if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict()
+        if self.gke_config:
+            body["gke_config"] = self.gke_config.as_dict()
         if self.is_no_public_ip_enabled is not None:
-            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier.value
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
-        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status.value
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
+        if self.workspace_status is not None:
+            body["workspace_status"] = self.workspace_status.value
         if self.workspace_status_message is not None:
-            body['workspace_status_message'] = self.workspace_status_message
+            body["workspace_status_message"] = self.workspace_status_message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Workspace into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
-        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.azure_workspace_info:
+            body["azure_workspace_info"] = self.azure_workspace_info
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cloud_resource_container:
+            body["cloud_resource_container"] = self.cloud_resource_container
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
+        if self.external_customer_info:
+            body["external_customer_info"] = self.external_customer_info
         if self.gcp_managed_network_config:
-            body['gcp_managed_network_config'] = self.gcp_managed_network_config
-        if self.gke_config: body['gke_config'] = self.gke_config
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config
+        if self.gke_config:
+            body["gke_config"] = self.gke_config
         if self.is_no_public_ip_enabled is not None:
-            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
         if self.managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier
         if self.private_access_settings_id is not None:
-            body['private_access_settings_id'] = self.private_access_settings_id
+            body["private_access_settings_id"] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
-            body['storage_configuration_id'] = self.storage_configuration_id
+            body["storage_configuration_id"] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
-        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
+        if self.workspace_status is not None:
+            body["workspace_status"] = self.workspace_status
         if self.workspace_status_message is not None:
-            body['workspace_status_message'] = self.workspace_status_message
+            body["workspace_status_message"] = self.workspace_status_message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Workspace:
         """Deserializes the Workspace from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   aws_region=d.get('aws_region', None),
-                   azure_workspace_info=_from_dict(d, 'azure_workspace_info', AzureWorkspaceInfo),
-                   cloud=d.get('cloud', None),
-                   cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer),
-                   creation_time=d.get('creation_time', None),
-                   credentials_id=d.get('credentials_id', None),
-                   custom_tags=d.get('custom_tags', None),
-                   deployment_name=d.get('deployment_name', None),
-                   external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo),
-                   gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
-                                                         GcpManagedNetworkConfig),
-                   gke_config=_from_dict(d, 'gke_config', GkeConfig),
-                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
-                   location=d.get('location', None),
-                   managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
-                                                                  None),
-                   network_id=d.get('network_id', None),
-                   pricing_tier=_enum(d, 'pricing_tier', PricingTier),
-                   private_access_settings_id=d.get('private_access_settings_id', None),
-                   storage_configuration_id=d.get('storage_configuration_id', None),
-                   storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
-                   workspace_id=d.get('workspace_id', None),
-                   workspace_name=d.get('workspace_name', None),
-                   workspace_status=_enum(d, 'workspace_status', WorkspaceStatus),
-                   workspace_status_message=d.get('workspace_status_message', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_region=d.get("aws_region", None),
+            azure_workspace_info=_from_dict(d, "azure_workspace_info", AzureWorkspaceInfo),
+            cloud=d.get("cloud", None),
+            cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer),
+            creation_time=d.get("creation_time", None),
+            credentials_id=d.get("credentials_id", None),
+            custom_tags=d.get("custom_tags", None),
+            deployment_name=d.get("deployment_name", None),
+            external_customer_info=_from_dict(d, "external_customer_info", ExternalCustomerInfo),
+            gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig),
+            gke_config=_from_dict(d, "gke_config", GkeConfig),
+            is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None),
+            location=d.get("location", None),
+            managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network_id=d.get("network_id", None),
+            pricing_tier=_enum(d, "pricing_tier", PricingTier),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            workspace_id=d.get("workspace_id", None),
+            workspace_name=d.get("workspace_name", None),
+            workspace_status=_enum(d, "workspace_status", WorkspaceStatus),
+            workspace_status_message=d.get("workspace_status_message", None),
+        )
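
`from_dict` also promotes nested dictionaries and enum strings to their Python types, while `as_dict` lowers them back to JSON-ready values and `as_shallow_dict` leaves the objects in place. A small sketch under the assumption that `AzureWorkspaceInfo` carries `resource_group` and `subscription_id` as it does elsewhere in this module; all IDs and names below are placeholders:

from databricks.sdk.service.provisioning import AzureWorkspaceInfo, Workspace, WorkspaceStatus

# Deserialize a truncated, hypothetical API response.
ws = Workspace.from_dict(
    {
        "workspace_id": 1234567890123456,  # placeholder
        "workspace_name": "example-workspace",  # placeholder
        "workspace_status": "RUNNING",
        "azure_workspace_info": {"resource_group": "rg-example", "subscription_id": "sub-example"},
    }
)

# Enum strings and nested dicts are materialized as Python objects...
assert ws.workspace_status is WorkspaceStatus.RUNNING
assert isinstance(ws.azure_workspace_info, AzureWorkspaceInfo)

# ...as_dict() lowers them back to JSON-friendly values, while
# as_shallow_dict() keeps the enum member and the nested object as-is.
assert ws.as_dict()["workspace_status"] == "RUNNING"
assert ws.as_shallow_dict()["workspace_status"] is WorkspaceStatus.RUNNING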
 
 
 class WorkspaceStatus(Enum):
     """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
     initially. Continue to check the status until the status is `RUNNING`."""
 
-    BANNED = 'BANNED'
-    CANCELLING = 'CANCELLING'
-    FAILED = 'FAILED'
-    NOT_PROVISIONED = 'NOT_PROVISIONED'
-    PROVISIONING = 'PROVISIONING'
-    RUNNING = 'RUNNING'
+    BANNED = "BANNED"
+    CANCELLING = "CANCELLING"
+    FAILED = "FAILED"
+    NOT_PROVISIONED = "NOT_PROVISIONED"
+    PROVISIONING = "PROVISIONING"
+    RUNNING = "RUNNING"
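
The docstring above describes the intended flow: a new workspace starts in `PROVISIONING` and should be polled until it reports `RUNNING`. A minimal polling sketch, assuming an `AccountClient` authenticated from the environment and using the Workspaces API defined later in this module; the workspace ID is a placeholder, and in practice the waiter returned by the generated `create()` can usually do this for you:

import time

from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import Workspace, WorkspaceStatus

a = AccountClient()  # assumes account host, account ID and credentials in the environment


def wait_until_running(workspace_id: int, timeout_s: float = 20 * 60) -> Workspace:
    """Poll a workspace until it leaves PROVISIONING, failing fast on terminal states."""
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        ws = a.workspaces.get(workspace_id=workspace_id)
        if ws.workspace_status is WorkspaceStatus.RUNNING:
            return ws
        if ws.workspace_status is not WorkspaceStatus.PROVISIONING:
            raise RuntimeError(f"workspace ended in {ws.workspace_status}: {ws.workspace_status_message}")
        time.sleep(30)  # conservative poll interval
    raise TimeoutError(f"workspace {workspace_id} did not reach RUNNING within {timeout_s}s")


# wait_until_running(1234567890123456)  # placeholder workspace ID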
 
 
 class CredentialsAPI:
@@ -2036,86 +2347,111 @@ class CredentialsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential:
+    def create(
+        self,
+        credentials_name: str,
+        aws_credentials: CreateCredentialAwsCredentials,
+    ) -> Credential:
         """Create credential configuration.
-        
+
         Creates a Databricks credential configuration that represents cloud cross-account credentials for a
         specified account. Databricks uses this to set up network infrastructure properly to host Databricks
         clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
         ID) in the returned credential object, and configure the required access policy.
-        
+
         Save the response's `credentials_id` field, which is the ID for your new credential configuration
         object.
-        
+
         For information about how to create a new workspace with this API, see [Create a new workspace using
         the Account API]
-        
+
         [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
+
         :param credentials_name: str
           The human-readable name of the credential configuration object.
         :param aws_credentials: :class:`CreateCredentialAwsCredentials`
-        
+
         :returns: :class:`Credential`
         """
         body = {}
-        if aws_credentials is not None: body['aws_credentials'] = aws_credentials.as_dict()
-        if credentials_name is not None: body['credentials_name'] = credentials_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if aws_credentials is not None:
+            body["aws_credentials"] = aws_credentials.as_dict()
+        if credentials_name is not None:
+            body["credentials_name"] = credentials_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/credentials',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/credentials",
+            body=body,
+            headers=headers,
+        )
         return Credential.from_dict(res)
 
     def delete(self, credentials_id: str):
         """Delete credential configuration.
-        
+
         Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
         delete a credential that is associated with any workspace.
-        
+
         :param credentials_id: str
           Databricks Account API credential configuration ID
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}",
+            headers=headers,
+        )
 
     def get(self, credentials_id: str) -> Credential:
         """Get credential configuration.
-        
+
         Gets a Databricks credential configuration object for an account, both specified by ID.
-        
+
         :param credentials_id: str
           Databricks Account API credential configuration ID
-        
+
         :returns: :class:`Credential`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}",
+            headers=headers,
+        )
         return Credential.from_dict(res)
 
     def list(self) -> Iterator[Credential]:
         """Get all credential configurations.
-        
+
         Gets all Databricks credential configurations associated with an account specified by ID.
-        
+
         :returns: Iterator over :class:`Credential`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/credentials', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/credentials",
+            headers=headers,
+        )
         return [Credential.from_dict(v) for v in res]
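
To make the lifecycle above concrete, here is a hedged usage sketch in the style of the SDK's account examples: register a cross-account IAM role, keep the returned `credentials_id` for workspace creation, then read, list, and delete it. The profile name and role ARN are placeholders:

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient(profile="ACCOUNT-PROFILE")  # placeholder ~/.databrickscfg profile

# Register a cross-account IAM role as a credential configuration.
cred = a.credentials.create(
    credentials_name="example-credentials",  # placeholder
    aws_credentials=provisioning.CreateCredentialAwsCredentials(
        sts_role=provisioning.CreateCredentialStsRole(
            role_arn="arn:aws:iam::123456789012:role/example-cross-account-role"  # placeholder
        )
    ),
)
print(f"save this for workspace creation: {cred.credentials_id}")

# Read it back by ID and enumerate every configuration in the account.
a.credentials.get(credentials_id=cred.credentials_id)
for c in a.credentials.list():
    print(c.credentials_id, c.credentials_name)

# Only credentials that are not attached to a workspace can be deleted.
a.credentials.delete(credentials_id=cred.credentials_id)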
 
 
@@ -2123,11 +2459,11 @@ class EncryptionKeysAPI:
     """These APIs manage encryption key configurations for this workspace (optional). A key configuration
     encapsulates the AWS KMS key information and some information about how the key configuration can be used.
     There are two possible uses for key configurations:
-    
+
     * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
     the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can
     be used to encrypt a workspace's DBFS and EBS data in the data plane.
-    
+
     In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
     feature is available if your account is on the E2 version of the platform. Updating a running workspace
     with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you
@@ -2137,13 +2473,15 @@ class EncryptionKeysAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               use_cases: List[KeyUseCase],
-               *,
-               aws_key_info: Optional[CreateAwsKeyInfo] = None,
-               gcp_key_info: Optional[CreateGcpKeyInfo] = None) -> CustomerManagedKey:
+    def create(
+        self,
+        use_cases: List[KeyUseCase],
+        *,
+        aws_key_info: Optional[CreateAwsKeyInfo] = None,
+        gcp_key_info: Optional[CreateGcpKeyInfo] = None,
+    ) -> CustomerManagedKey:
         """Create encryption key configuration.
-        
+
         Creates a customer-managed key configuration object for an account, specified by ID. This operation
         uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
         customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
@@ -2151,54 +2489,65 @@ def create(self,
         specified as a workspace's customer-managed key for workspace storage, the key encrypts the
         workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally,
         cluster EBS volume data.
-        
+
         **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
         and AWS regions that currently support creation of Databricks workspaces.
-        
+
         This operation is available only if your account is on the E2 version of the platform or on a select
         custom plan that allows multiple workspaces per account.
-        
+
         :param use_cases: List[:class:`KeyUseCase`]
           The cases that the key can be used for.
         :param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
         :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
-        
+
         :returns: :class:`CustomerManagedKey`
         """
         body = {}
-        if aws_key_info is not None: body['aws_key_info'] = aws_key_info.as_dict()
-        if gcp_key_info is not None: body['gcp_key_info'] = gcp_key_info.as_dict()
-        if use_cases is not None: body['use_cases'] = [v.value for v in use_cases]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if aws_key_info is not None:
+            body["aws_key_info"] = aws_key_info.as_dict()
+        if gcp_key_info is not None:
+            body["gcp_key_info"] = gcp_key_info.as_dict()
+        if use_cases is not None:
+            body["use_cases"] = [v.value for v in use_cases]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys",
+            body=body,
+            headers=headers,
+        )
         return CustomerManagedKey.from_dict(res)
 
     def delete(self, customer_managed_key_id: str):
         """Delete encryption key configuration.
-        
+
         Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
         that is associated with a running workspace.
-        
+
         :param customer_managed_key_id: str
           Databricks encryption key configuration ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}",
+            headers=headers,
+        )
 
     def get(self, customer_managed_key_id: str) -> CustomerManagedKey:
         """Get encryption key configuration.
-        
+
         Gets a customer-managed key configuration object for an account, specified by ID. This operation
         uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
         customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
@@ -2206,48 +2555,55 @@ def get(self, customer_managed_key_id: str) -> CustomerManagedKey:
         specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3
         bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume
         data.
-        
+
         **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
         and AWS regions.
-        
+
         This operation is available only if your account is on the E2 version of the platform.
-        
+
         :param customer_managed_key_id: str
           Databricks encryption key configuration ID.
-        
+
         :returns: :class:`CustomerManagedKey`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}",
+            headers=headers,
+        )
         return CustomerManagedKey.from_dict(res)
 
     def list(self) -> Iterator[CustomerManagedKey]:
         """Get all encryption key configurations.
-        
+
         Gets all customer-managed key configuration objects for an account. If the key is specified as a
         workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
         notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
         If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
         workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
-        
+
         **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
         and AWS regions.
-        
+
         This operation is available only if your account is on the E2 version of the platform.
-        
+
         :returns: Iterator over :class:`CustomerManagedKey`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys",
+            headers=headers,
+        )
         return [CustomerManagedKey.from_dict(v) for v in res]
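
A hedged usage sketch for the encryption-key lifecycle above, assuming an E2 account and account-level authentication from the environment; the KMS key ARN and alias are placeholders:

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient()  # assumes account-level credentials in the environment

# Register a KMS key for managed-services (control-plane) encryption.
cmk = a.encryption_keys.create(
    use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES],
    aws_key_info=provisioning.CreateAwsKeyInfo(
        key_arn="arn:aws:kms:us-west-2:123456789012:key/00000000-0000-0000-0000-000000000000",  # placeholder
        key_alias="alias/example-databricks-cmk",  # placeholder
    ),
)
print(f"customer_managed_key_id={cmk.customer_managed_key_id}")

# Inspect and enumerate registered keys, then remove one that is not
# attached to a running workspace.
a.encryption_keys.get(customer_managed_key_id=cmk.customer_managed_key_id)
for key in a.encryption_keys.list():
    print(key.customer_managed_key_id, key.use_cases)
a.encryption_keys.delete(customer_managed_key_id=cmk.customer_managed_key_id)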
 
 
@@ -2258,19 +2614,21 @@ class NetworksAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               network_name: str,
-               *,
-               gcp_network_info: Optional[GcpNetworkInfo] = None,
-               security_group_ids: Optional[List[str]] = None,
-               subnet_ids: Optional[List[str]] = None,
-               vpc_endpoints: Optional[NetworkVpcEndpoints] = None,
-               vpc_id: Optional[str] = None) -> Network:
+    def create(
+        self,
+        network_name: str,
+        *,
+        gcp_network_info: Optional[GcpNetworkInfo] = None,
+        security_group_ids: Optional[List[str]] = None,
+        subnet_ids: Optional[List[str]] = None,
+        vpc_endpoints: Optional[NetworkVpcEndpoints] = None,
+        vpc_id: Optional[str] = None,
+    ) -> Network:
         """Create network configuration.
-        
+
         Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be
         used for new Databricks clusters. This requires a pre-existing VPC and subnets.
-        
+
         :param network_name: str
           The human-readable name of the network configuration.
         :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
@@ -2285,80 +2643,105 @@ def create(self,
         :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
           If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
           PrivateLink].
-          
+
           [AWS PrivateLink]: https://aws.amazon.com/privatelink/
         :param vpc_id: str (optional)
           The ID of the VPC associated with this network. VPC IDs can be used in multiple network
           configurations.
-        
+
         :returns: :class:`Network`
         """
         body = {}
-        if gcp_network_info is not None: body['gcp_network_info'] = gcp_network_info.as_dict()
-        if network_name is not None: body['network_name'] = network_name
-        if security_group_ids is not None: body['security_group_ids'] = [v for v in security_group_ids]
-        if subnet_ids is not None: body['subnet_ids'] = [v for v in subnet_ids]
-        if vpc_endpoints is not None: body['vpc_endpoints'] = vpc_endpoints.as_dict()
-        if vpc_id is not None: body['vpc_id'] = vpc_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/networks',
-                           body=body,
-                           headers=headers)
+        if gcp_network_info is not None:
+            body["gcp_network_info"] = gcp_network_info.as_dict()
+        if network_name is not None:
+            body["network_name"] = network_name
+        if security_group_ids is not None:
+            body["security_group_ids"] = [v for v in security_group_ids]
+        if subnet_ids is not None:
+            body["subnet_ids"] = [v for v in subnet_ids]
+        if vpc_endpoints is not None:
+            body["vpc_endpoints"] = vpc_endpoints.as_dict()
+        if vpc_id is not None:
+            body["vpc_id"] = vpc_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/networks",
+            body=body,
+            headers=headers,
+        )
         return Network.from_dict(res)
 
     def delete(self, network_id: str):
         """Delete a network configuration.
-        
+
         Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
         delete a network that is associated with a workspace.
-        
+
         This operation is available only if your account is on the E2 version of the platform.
-        
+
         :param network_id: str
           Databricks Account API network configuration ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}",
+            headers=headers,
+        )
 
     def get(self, network_id: str) -> Network:
         """Get a network configuration.
-        
+
         Gets a Databricks network configuration, which represents a cloud VPC and its resources.
-        
+
         :param network_id: str
           Databricks Account API network configuration ID.
-        
+
         :returns: :class:`Network`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}",
+            headers=headers,
+        )
         return Network.from_dict(res)
 
     def list(self) -> Iterator[Network]:
         """Get all network configurations.
-        
+
         Gets a list of all Databricks network configurations for an account, specified by ID.
-        
+
         This operation is available only if your account is on the E2 version of the platform.
-        
+
         :returns: Iterator over :class:`Network`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/networks', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/networks",
+            headers=headers,
+        )
         return [Network.from_dict(v) for v in res]
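
A usage sketch for the network-configuration lifecycle above; the VPC, subnet, and security-group IDs are placeholders that must already exist in AWS, and the client is assumed to pick up account-level credentials from the environment:

from databricks.sdk import AccountClient

a = AccountClient()  # assumes account-level credentials in the environment

# Register an existing VPC (with its subnets and security group) as a network configuration.
network = a.networks.create(
    network_name="example-network",  # placeholder
    vpc_id="vpc-0123456789abcdef0",  # placeholder
    subnet_ids=["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"],  # placeholders
    security_group_ids=["sg-0123456789abcdef0"],  # placeholder
)
print(f"network_id={network.network_id}")

# Read it back, list every configuration, and delete one that is unattached.
a.networks.get(network_id=network.network_id)
for n in a.networks.list():
    print(n.network_id, n.vpc_status)
a.networks.delete(network_id=network.network_id)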
 
 
@@ -2368,28 +2751,30 @@ class PrivateAccessAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               private_access_settings_name: str,
-               region: str,
-               *,
-               allowed_vpc_endpoint_ids: Optional[List[str]] = None,
-               private_access_level: Optional[PrivateAccessLevel] = None,
-               public_access_enabled: Optional[bool] = None) -> PrivateAccessSettings:
+    def create(
+        self,
+        private_access_settings_name: str,
+        region: str,
+        *,
+        allowed_vpc_endpoint_ids: Optional[List[str]] = None,
+        private_access_level: Optional[PrivateAccessLevel] = None,
+        public_access_enabled: Optional[bool] = None,
+    ) -> PrivateAccessSettings:
         """Create private access settings.
-        
+
         Creates a private access settings object, which specifies how your workspace is accessed over [AWS
         PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
         referenced by ID in the workspace's `private_access_settings_id` property.
-        
+
         You can share one private access settings object with multiple workspaces in a single account. However,
         private access settings are specific to AWS regions, so only workspaces in the same AWS region can use
         a given private access settings object.
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :param private_access_settings_name: str
           The human-readable name of the private access settings object.
         :param region: str
@@ -2398,14 +2783,14 @@ def create(self,
           An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
           the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
           AWS.
-          
+
           Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
           in your account that can connect to your workspace over AWS PrivateLink.
-          
+
           If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
           control only works for PrivateLink connections. To control how your workspace is accessed via public
           internet, see [IP access lists].
-          
+
           [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
         :param private_access_level: :class:`PrivateAccessLevel` (optional)
           The private access level controls which VPC endpoints can connect to the UI or API of any workspace
@@ -2417,117 +2802,137 @@ def create(self,
           Determines if the workspace can be accessed over public internet. For fully private workspaces, you
           can optionally specify `false`, but only if you implement both the front-end and the back-end
           PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-        
+
         :returns: :class:`PrivateAccessSettings`
         """
         body = {}
         if allowed_vpc_endpoint_ids is not None:
-            body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids]
-        if private_access_level is not None: body['private_access_level'] = private_access_level.value
+            body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids]
+        if private_access_level is not None:
+            body["private_access_level"] = private_access_level.value
         if private_access_settings_name is not None:
-            body['private_access_settings_name'] = private_access_settings_name
-        if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled
-        if region is not None: body['region'] = region
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/private-access-settings',
-                           body=body,
-                           headers=headers)
+            body["private_access_settings_name"] = private_access_settings_name
+        if public_access_enabled is not None:
+            body["public_access_enabled"] = public_access_enabled
+        if region is not None:
+            body["region"] = region
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings",
+            body=body,
+            headers=headers,
+        )
         return PrivateAccessSettings.from_dict(res)
 
     def delete(self, private_access_settings_id: str):
         """Delete a private access settings object.
-        
+
         Deletes a private access settings object, which determines how your workspace is accessed over [AWS
         PrivateLink].
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :param private_access_settings_id: str
           Databricks Account API private access settings ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}",
+            headers=headers,
+        )
 
     def get(self, private_access_settings_id: str) -> PrivateAccessSettings:
         """Get a private access settings object.
-        
+
         Gets a private access settings object, which specifies how your workspace is accessed over [AWS
         PrivateLink].
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :param private_access_settings_id: str
           Databricks Account API private access settings ID.
-        
+
         :returns: :class:`PrivateAccessSettings`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}",
+            headers=headers,
+        )
         return PrivateAccessSettings.from_dict(res)
 
     def list(self) -> Iterator[PrivateAccessSettings]:
         """Get all private access settings objects.
-        
+
         Gets a list of all private access settings objects for an account, specified by ID.
-        
+
         :returns: Iterator over :class:`PrivateAccessSettings`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/private-access-settings',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings",
+            headers=headers,
+        )
         return [PrivateAccessSettings.from_dict(v) for v in res]
 
-    def replace(self,
-                private_access_settings_id: str,
-                private_access_settings_name: str,
-                region: str,
-                *,
-                allowed_vpc_endpoint_ids: Optional[List[str]] = None,
-                private_access_level: Optional[PrivateAccessLevel] = None,
-                public_access_enabled: Optional[bool] = None):
+    def replace(
+        self,
+        private_access_settings_id: str,
+        private_access_settings_name: str,
+        region: str,
+        *,
+        allowed_vpc_endpoint_ids: Optional[List[str]] = None,
+        private_access_level: Optional[PrivateAccessLevel] = None,
+        public_access_enabled: Optional[bool] = None,
+    ):
         """Replace private access settings.
-        
+
         Updates an existing private access settings object, which specifies how your workspace is accessed
         over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
         referenced by ID in the workspace's `private_access_settings_id` property.
-        
+
         This operation completely overwrites your existing private access settings object attached to your
         workspaces. All workspaces attached to the private access settings are affected by any change. If
         `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of
         these changes might take several minutes to propagate to the workspace API.
-        
+
         You can share one private access settings object with multiple workspaces in a single account.
         However, private access settings are specific to AWS regions, so only workspaces in the same AWS
         region can use a given private access settings object.
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :param private_access_settings_id: str
           Databricks Account API private access settings ID.
         :param private_access_settings_name: str
@@ -2538,14 +2943,14 @@ def replace(self,
           An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
           the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
           AWS.
-          
+
           Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
           in your account that can connect to your workspace over AWS PrivateLink.
-          
+
           If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
           control only works for PrivateLink connections. To control how your workspace is accessed via public
           internet, see [IP access lists].
-          
+
           [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
         :param private_access_level: :class:`PrivateAccessLevel` (optional)
           The private access level controls which VPC endpoints can connect to the UI or API of any workspace
@@ -2557,24 +2962,31 @@ def replace(self,
           Determines if the workspace can be accessed over public internet. For fully private workspaces, you
           can optionally specify `false`, but only if you implement both the front-end and the back-end
           PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-        
-        
+
+
         """
         body = {}
         if allowed_vpc_endpoint_ids is not None:
-            body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids]
-        if private_access_level is not None: body['private_access_level'] = private_access_level.value
+            body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids]
+        if private_access_level is not None:
+            body["private_access_level"] = private_access_level.value
         if private_access_settings_name is not None:
-            body['private_access_settings_name'] = private_access_settings_name
-        if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled
-        if region is not None: body['region'] = region
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["private_access_settings_name"] = private_access_settings_name
+        if public_access_enabled is not None:
+            body["public_access_enabled"] = public_access_enabled
+        if region is not None:
+            body["region"] = region
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         self._api.do(
-            'PUT',
-            f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}',
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
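
A usage sketch for the private access settings lifecycle above, including the overwrite semantics of `replace()`; the region, names, and access level are placeholders chosen for illustration:

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient()  # assumes account-level credentials in the environment

# Create a private access settings object for a single AWS region.
pas = a.private_access.create(
    private_access_settings_name="example-pas",  # placeholder
    region="us-west-2",  # placeholder
)

# replace() overwrites the entire object, so re-send every field you want to keep.
a.private_access.replace(
    private_access_settings_id=pas.private_access_settings_id,
    private_access_settings_name="example-pas",
    region="us-west-2",
    private_access_level=provisioning.PrivateAccessLevel.ACCOUNT,
    public_access_enabled=False,
)

a.private_access.get(private_access_settings_id=pas.private_access_settings_id)
a.private_access.delete(private_access_settings_id=pas.private_access_settings_id)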
 
 
 class StorageAPI:
@@ -2586,90 +2998,105 @@ class StorageAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self, storage_configuration_name: str,
-               root_bucket_info: RootBucketInfo) -> StorageConfiguration:
+    def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration:
         """Create new storage configuration.
-        
+
         Creates a new storage configuration for an account, specified by ID. Uploads a storage configuration
         object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
         assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
         required bucket policy.
-        
+
         For information about how to create a new workspace with this API, see [Create a new workspace using
         the Account API]
-        
+
         [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
+
         :param storage_configuration_name: str
           The human-readable name of the storage configuration.
         :param root_bucket_info: :class:`RootBucketInfo`
           Root S3 bucket information.
-        
+
         :returns: :class:`StorageConfiguration`
         """
         body = {}
-        if root_bucket_info is not None: body['root_bucket_info'] = root_bucket_info.as_dict()
+        if root_bucket_info is not None:
+            body["root_bucket_info"] = root_bucket_info.as_dict()
         if storage_configuration_name is not None:
-            body['storage_configuration_name'] = storage_configuration_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["storage_configuration_name"] = storage_configuration_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/storage-configurations',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/storage-configurations",
+            body=body,
+            headers=headers,
+        )
         return StorageConfiguration.from_dict(res)
 
     def delete(self, storage_configuration_id: str):
         """Delete storage configuration.
-        
+
         Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
         associated with any workspace.
-        
+
         :param storage_configuration_id: str
           Databricks Account API storage configuration ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}",
+            headers=headers,
+        )
 
     def get(self, storage_configuration_id: str) -> StorageConfiguration:
         """Get storage configuration.
-        
+
         Gets a Databricks storage configuration for an account, both specified by ID.
-        
+
         :param storage_configuration_id: str
           Databricks Account API storage configuration ID.
-        
+
         :returns: :class:`StorageConfiguration`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}",
+            headers=headers,
+        )
         return StorageConfiguration.from_dict(res)
 
     def list(self) -> Iterator[StorageConfiguration]:
         """Get all storage configurations.
-        
+
         Gets a list of all Databricks storage configurations for your account, specified by ID.
-        
+
         :returns: Iterator over :class:`StorageConfiguration`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/storage-configurations',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/storage-configurations",
+            headers=headers,
+        )
         return [StorageConfiguration.from_dict(v) for v in res]
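# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of reading configurations back with the list()/get() methods above;
# the `a.storage` accessor and the configuration ID are assumptions/placeholders.
from databricks.sdk import AccountClient

a = AccountClient()
for sc in a.storage.list():
    print(sc)
one = a.storage.get(storage_configuration_id="<storage-configuration-id>")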
 
 
@@ -2679,27 +3106,29 @@ class VpcEndpointsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               vpc_endpoint_name: str,
-               *,
-               aws_vpc_endpoint_id: Optional[str] = None,
-               gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None,
-               region: Optional[str] = None) -> VpcEndpoint:
+    def create(
+        self,
+        vpc_endpoint_name: str,
+        *,
+        aws_vpc_endpoint_id: Optional[str] = None,
+        gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None,
+        region: Optional[str] = None,
+    ) -> VpcEndpoint:
         """Create VPC endpoint configuration.
-        
+
         Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
         communicate privately with Databricks over [AWS PrivateLink].
-        
+
         After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
         accepts the VPC endpoint.
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
         [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
         [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
-        
+
         :param vpc_endpoint_name: str
           The human-readable name of the VPC endpoint configuration.
         :param aws_vpc_endpoint_id: str (optional)
@@ -2708,83 +3137,106 @@ def create(self,
           The Google Cloud specific information for this Private Service Connect endpoint.
         :param region: str (optional)
           The AWS region in which this VPC endpoint object exists.
-        
+
         :returns: :class:`VpcEndpoint`
         """
         body = {}
-        if aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = aws_vpc_endpoint_id
-        if gcp_vpc_endpoint_info is not None: body['gcp_vpc_endpoint_info'] = gcp_vpc_endpoint_info.as_dict()
-        if region is not None: body['region'] = region
-        if vpc_endpoint_name is not None: body['vpc_endpoint_name'] = vpc_endpoint_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = aws_vpc_endpoint_id
+        if gcp_vpc_endpoint_info is not None:
+            body["gcp_vpc_endpoint_info"] = gcp_vpc_endpoint_info.as_dict()
+        if region is not None:
+            body["region"] = region
+        if vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = vpc_endpoint_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints",
+            body=body,
+            headers=headers,
+        )
         return VpcEndpoint.from_dict(res)
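# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of registering an existing AWS VPC endpoint via the create() method
# above. The endpoint ID, region, and the `a.vpc_endpoints` accessor are
# placeholders/assumptions.
from databricks.sdk import AccountClient

a = AccountClient()
endpoint = a.vpc_endpoints.create(
    vpc_endpoint_name="sdk-demo-vpc-endpoint",
    aws_vpc_endpoint_id="vpce-0123456789abcdef0",
    region="us-west-2",
)
print(endpoint)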
 
     def delete(self, vpc_endpoint_id: str):
         """Delete VPC endpoint configuration.
-        
+
         Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
         privately with Databricks over [AWS PrivateLink].
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :param vpc_endpoint_id: str
           Databricks VPC endpoint ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}",
+            headers=headers,
+        )
 
     def get(self, vpc_endpoint_id: str) -> VpcEndpoint:
         """Get a VPC endpoint configuration.
-        
+
         Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
         privately with Databricks over [AWS PrivateLink].
-        
+
         [AWS PrivateLink]: https://aws.amazon.com/privatelink
         [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
-        
+
         :param vpc_endpoint_id: str
           Databricks VPC endpoint ID.
-        
+
         :returns: :class:`VpcEndpoint`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}",
+            headers=headers,
+        )
         return VpcEndpoint.from_dict(res)
 
     def list(self) -> Iterator[VpcEndpoint]:
         """Get all VPC endpoint configurations.
-        
+
         Gets a list of all VPC endpoints for an account, specified by ID.
-        
+
         Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
+
         [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
+
         :returns: Iterator over :class:`VpcEndpoint`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints",
+            headers=headers,
+        )
         return [VpcEndpoint.from_dict(v) for v in res]
 
 
@@ -2792,21 +3244,26 @@ class WorkspacesAPI:
     """These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all
     of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into
     folders, and provides access to data and computational resources such as clusters and jobs.
-    
+
     These endpoints are available if your account is on the E2 version of the platform or on a select custom
     plan that allows multiple workspaces per account."""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_workspace_running(self,
-                                   workspace_id: int,
-                                   timeout=timedelta(minutes=20),
-                                   callback: Optional[Callable[[Workspace], None]] = None) -> Workspace:
+    def wait_get_workspace_running(
+        self,
+        workspace_id: int,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[Workspace], None]] = None,
+    ) -> Workspace:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (WorkspaceStatus.RUNNING, )
-        failure_states = (WorkspaceStatus.BANNED, WorkspaceStatus.FAILED, )
-        status_message = 'polling...'
+        target_states = (WorkspaceStatus.RUNNING,)
+        failure_states = (
+            WorkspaceStatus.BANNED,
+            WorkspaceStatus.FAILED,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(workspace_id=workspace_id)
@@ -2817,48 +3274,50 @@ def wait_get_workspace_running(self,
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f"failed to reach RUNNING, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"workspace_id={workspace_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def create(self,
-               workspace_name: str,
-               *,
-               aws_region: Optional[str] = None,
-               cloud: Optional[str] = None,
-               cloud_resource_container: Optional[CloudResourceContainer] = None,
-               credentials_id: Optional[str] = None,
-               custom_tags: Optional[Dict[str, str]] = None,
-               deployment_name: Optional[str] = None,
-               gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
-               gke_config: Optional[GkeConfig] = None,
-               is_no_public_ip_enabled: Optional[bool] = None,
-               location: Optional[str] = None,
-               managed_services_customer_managed_key_id: Optional[str] = None,
-               network_id: Optional[str] = None,
-               pricing_tier: Optional[PricingTier] = None,
-               private_access_settings_id: Optional[str] = None,
-               storage_configuration_id: Optional[str] = None,
-               storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
+
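# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of using the waiter above directly to block until an existing
# workspace is RUNNING, with a progress callback. The workspace ID and the
# `a.workspaces` accessor are placeholders/assumptions.
from datetime import timedelta
from databricks.sdk import AccountClient

a = AccountClient()
ws = a.workspaces.wait_get_workspace_running(
    workspace_id=1234567890,
    timeout=timedelta(minutes=30),
    callback=lambda w: print(f"still waiting: {w}"),
)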
+    def create(
+        self,
+        workspace_name: str,
+        *,
+        aws_region: Optional[str] = None,
+        cloud: Optional[str] = None,
+        cloud_resource_container: Optional[CloudResourceContainer] = None,
+        credentials_id: Optional[str] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        deployment_name: Optional[str] = None,
+        gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
+        gke_config: Optional[GkeConfig] = None,
+        is_no_public_ip_enabled: Optional[bool] = None,
+        location: Optional[str] = None,
+        managed_services_customer_managed_key_id: Optional[str] = None,
+        network_id: Optional[str] = None,
+        pricing_tier: Optional[PricingTier] = None,
+        private_access_settings_id: Optional[str] = None,
+        storage_configuration_id: Optional[str] = None,
+        storage_customer_managed_key_id: Optional[str] = None,
+    ) -> Wait[Workspace]:
         """Create a new workspace.
-        
+
         Creates a new workspace.
-        
+
         **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
         has been accepted and is in progress, but does not mean that the workspace deployed successfully and
         is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID
         (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests
         with the workspace ID and check its status. The workspace becomes available when the status changes to
         `RUNNING`.
-        
+
         :param workspace_name: str
           The workspace's human-readable name.
         :param aws_region: str (optional)
@@ -2880,22 +3339,22 @@ def create(self,
           deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`.
           Hyphens are allowed. This property supports only the set of characters that are allowed in a
           subdomain.
-          
+
           To set this value, you must have a deployment name prefix. Contact your Databricks account team to
           add an account deployment name prefix to your account.
-          
+
           Workspace deployment names follow the account prefix and a hyphen. For example, if your account's
           deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response
           for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be
           `acme-workspace-1.cloud.databricks.com`.
-          
+
           You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment
           name to only include the deployment prefix. For example, if your account's deployment prefix is
           `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and
           the workspace URL is `acme.cloud.databricks.com`.
-          
+
           This value must be unique across all non-deleted deployments across all AWS regions.
-          
+
           If a new workspace omits this property, the server generates a unique deployment name for you with
           the pattern `dbc-xxxxxxxx-xxxx`.
         :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
@@ -2903,19 +3362,19 @@ def create(self,
           is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range
           configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
           detects an IP range overlap.
-          
+
           Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
           addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
           `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-          
+
           The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-          
+
           **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
           workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
           your Databricks workspace are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
           determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
           Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-          
+
           [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
         :param gke_config: :class:`GkeConfig` (optional)
           The configurations for the GKE cluster of a Databricks workspace.
@@ -2930,15 +3389,15 @@ def create(self,
         :param network_id: str (optional)
         :param pricing_tier: :class:`PricingTier` (optional)
           The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-          
+
           [AWS Pricing]: https://databricks.com/product/aws-pricing
         :param private_access_settings_id: str (optional)
           ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
           specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
           back-end (data plane to control plane connection), or both connection types.
-          
+
           Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-          
+
           [AWS PrivateLink]: https://aws.amazon.com/privatelink/
           [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
         :param storage_configuration_id: str (optional)
@@ -2947,43 +3406,62 @@ def create(self,
           The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
           workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
           provided key configuration object property `use_cases` must contain `STORAGE`.
-        
+
         :returns:
           Long-running operation waiter for :class:`Workspace`.
           See :method:wait_get_workspace_running for more details.
         """
         body = {}
-        if aws_region is not None: body['aws_region'] = aws_region
-        if cloud is not None: body['cloud'] = cloud
+        if aws_region is not None:
+            body["aws_region"] = aws_region
+        if cloud is not None:
+            body["cloud"] = cloud
         if cloud_resource_container is not None:
-            body['cloud_resource_container'] = cloud_resource_container.as_dict()
-        if credentials_id is not None: body['credentials_id'] = credentials_id
-        if custom_tags is not None: body['custom_tags'] = custom_tags
-        if deployment_name is not None: body['deployment_name'] = deployment_name
+            body["cloud_resource_container"] = cloud_resource_container.as_dict()
+        if credentials_id is not None:
+            body["credentials_id"] = credentials_id
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
+        if deployment_name is not None:
+            body["deployment_name"] = deployment_name
         if gcp_managed_network_config is not None:
-            body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict()
-        if gke_config is not None: body['gke_config'] = gke_config.as_dict()
-        if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled
-        if location is not None: body['location'] = location
+            body["gcp_managed_network_config"] = gcp_managed_network_config.as_dict()
+        if gke_config is not None:
+            body["gke_config"] = gke_config.as_dict()
+        if is_no_public_ip_enabled is not None:
+            body["is_no_public_ip_enabled"] = is_no_public_ip_enabled
+        if location is not None:
+            body["location"] = location
         if managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id
-        if network_id is not None: body['network_id'] = network_id
-        if pricing_tier is not None: body['pricing_tier'] = pricing_tier.value
+            body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id
+        if network_id is not None:
+            body["network_id"] = network_id
+        if pricing_tier is not None:
+            body["pricing_tier"] = pricing_tier.value
         if private_access_settings_id is not None:
-            body['private_access_settings_id'] = private_access_settings_id
-        if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
+            body["private_access_settings_id"] = private_access_settings_id
+        if storage_configuration_id is not None:
+            body["storage_configuration_id"] = storage_configuration_id
         if storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
-        if workspace_name is not None: body['workspace_name'] = workspace_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/accounts/{self._api.account_id}/workspaces',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_workspace_running,
-                    response=Workspace.from_dict(op_response),
-                    workspace_id=op_response['workspace_id'])
+            body["storage_customer_managed_key_id"] = storage_customer_managed_key_id
+        if workspace_name is not None:
+            body["workspace_name"] = workspace_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_workspace_running,
+            response=Workspace.from_dict(op_response),
+            workspace_id=op_response["workspace_id"],
+        )
 
     def create_and_wait(
         self,
@@ -3005,109 +3483,128 @@ def create_and_wait(
         private_access_settings_id: Optional[str] = None,
         storage_configuration_id: Optional[str] = None,
         storage_customer_managed_key_id: Optional[str] = None,
-        timeout=timedelta(minutes=20)) -> Workspace:
-        return self.create(aws_region=aws_region,
-                           cloud=cloud,
-                           cloud_resource_container=cloud_resource_container,
-                           credentials_id=credentials_id,
-                           custom_tags=custom_tags,
-                           deployment_name=deployment_name,
-                           gcp_managed_network_config=gcp_managed_network_config,
-                           gke_config=gke_config,
-                           is_no_public_ip_enabled=is_no_public_ip_enabled,
-                           location=location,
-                           managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
-                           network_id=network_id,
-                           pricing_tier=pricing_tier,
-                           private_access_settings_id=private_access_settings_id,
-                           storage_configuration_id=storage_configuration_id,
-                           storage_customer_managed_key_id=storage_customer_managed_key_id,
-                           workspace_name=workspace_name).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> Workspace:
+        return self.create(
+            aws_region=aws_region,
+            cloud=cloud,
+            cloud_resource_container=cloud_resource_container,
+            credentials_id=credentials_id,
+            custom_tags=custom_tags,
+            deployment_name=deployment_name,
+            gcp_managed_network_config=gcp_managed_network_config,
+            gke_config=gke_config,
+            is_no_public_ip_enabled=is_no_public_ip_enabled,
+            location=location,
+            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
+            network_id=network_id,
+            pricing_tier=pricing_tier,
+            private_access_settings_id=private_access_settings_id,
+            storage_configuration_id=storage_configuration_id,
+            storage_customer_managed_key_id=storage_customer_managed_key_id,
+            workspace_name=workspace_name,
+        ).result(timeout=timeout)
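# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of create_and_wait() above for an AWS workspace: the call submits
# the asynchronous create and then polls until the workspace is RUNNING.
# All IDs are placeholders; `a.workspaces` is an assumed AccountClient accessor.
from datetime import timedelta
from databricks.sdk import AccountClient

a = AccountClient()
workspace = a.workspaces.create_and_wait(
    workspace_name="sdk-demo-workspace",
    aws_region="us-west-2",
    credentials_id="<credentials-id>",
    storage_configuration_id="<storage-configuration-id>",
    timeout=timedelta(minutes=30),
)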
 
     def delete(self, workspace_id: int):
         """Delete a workspace.
-        
+
         Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
         However, it might take a few minutes for all workspace resources to be deleted, depending on the size
         and number of workspace resources.
-        
+
         This operation is available only if your account is on the E2 version of the platform or on a select
         custom plan that allows multiple workspaces per account.
-        
+
         :param workspace_id: int
           Workspace ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}",
+            headers=headers,
+        )
 
     def get(self, workspace_id: int) -> Workspace:
         """Get a workspace.
-        
+
         Gets information including status for a Databricks workspace, specified by ID. In the response, the
         `workspace_status` field indicates the current status. After initial workspace creation (which is
         asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
         becomes available when the status changes to `RUNNING`.
-        
+
         For information about how to create a new workspace with this API **including error handling**, see
         [Create a new workspace using the Account API].
-        
+
         This operation is available only if your account is on the E2 version of the platform or on a select
         custom plan that allows multiple workspaces per account.
-        
+
         [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
+
         :param workspace_id: int
           Workspace ID.
-        
+
         :returns: :class:`Workspace`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}",
+            headers=headers,
+        )
         return Workspace.from_dict(res)
 
     def list(self) -> Iterator[Workspace]:
         """Get all workspaces.
-        
+
         Gets a list of all workspaces associated with an account, specified by ID.
-        
+
         This operation is available only if your account is on the E2 version of the platform or on a select
         custom plan that allows multiple workspaces per account.
-        
+
         :returns: Iterator over :class:`Workspace`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/workspaces', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces",
+            headers=headers,
+        )
         return [Workspace.from_dict(v) for v in res]
 
-    def update(self,
-               workspace_id: int,
-               *,
-               aws_region: Optional[str] = None,
-               credentials_id: Optional[str] = None,
-               custom_tags: Optional[Dict[str, str]] = None,
-               managed_services_customer_managed_key_id: Optional[str] = None,
-               network_connectivity_config_id: Optional[str] = None,
-               network_id: Optional[str] = None,
-               private_access_settings_id: Optional[str] = None,
-               storage_configuration_id: Optional[str] = None,
-               storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
+    def update(
+        self,
+        workspace_id: int,
+        *,
+        aws_region: Optional[str] = None,
+        credentials_id: Optional[str] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        managed_services_customer_managed_key_id: Optional[str] = None,
+        network_connectivity_config_id: Optional[str] = None,
+        network_id: Optional[str] = None,
+        private_access_settings_id: Optional[str] = None,
+        storage_configuration_id: Optional[str] = None,
+        storage_customer_managed_key_id: Optional[str] = None,
+    ) -> Wait[Workspace]:
         """Update workspace configuration.
-        
+
         Updates a workspace configuration for either a running workspace or a failed workspace. The elements
         that can be updated vary between these two use cases.
-        
+
         ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace
         deployment for some fields, but not all fields. For a failed workspace, this request supports updates
         to the following fields only: - Credential configuration ID - Storage configuration ID - Network
@@ -3129,14 +3626,14 @@ def update(self,
         update the network connectivity configuration ID to ensure the workspace uses the same set of stable
         IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from
         the workspace once attached; you can only switch to another one.
-        
+
         After calling the `PATCH` operation to update the workspace configuration, make repeated `GET`
         requests with the workspace ID and check the workspace status. The workspace is successful if the
         status changes to `RUNNING`.
-        
+
         For information about how to create a new workspace with this API **including error handling**, see
         [Create a new workspace using the Account API].
-        
+
         ### Update a running workspace You can update a Databricks workspace configuration for running
         workspaces for some fields, but not all fields. For a running workspace, this request supports
         updating the following fields only: - Credential configuration ID - Network configuration ID. Used
@@ -3162,12 +3659,12 @@ def update(self,
         network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR
         blocks to access your resources. You cannot remove a network connectivity configuration from the
         workspace once attached; you can only switch to another one.
-        
+
         **Important**: To update a running workspace, your workspace must have no running compute resources
         that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose
         clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not
         terminate all cluster instances in the workspace before calling this API, the request will fail.
-        
+
         ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace
         configuration, make repeated `GET` requests with the workspace ID and check the workspace status and
         the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes
@@ -3183,22 +3680,22 @@ def update(self,
         silently to its original configuration. After the workspace has been updated, you cannot use or create
         clusters for another 20 minutes. If you create or use clusters before this time interval elapses,
         clusters might not launch successfully, might fail, or could cause other unexpected behavior.
-        
+
         If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes
         to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to
         the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20
         minute wait.
-        
+
         **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment
         types and subscription types. If you have questions about availability, contact your Databricks
         representative.
-        
+
         This operation is available only if your account is on the E2 version of the platform or on a select
         custom plan that allows multiple workspaces per account.
-        
+
         [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
         [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
+
         :param workspace_id: int
           Workspace ID.
         :param aws_region: str (optional)
@@ -3228,34 +3725,46 @@ def update(self,
         :param storage_customer_managed_key_id: str (optional)
           The ID of the key configuration object for workspace storage. This parameter is available for
           updating both failed and running workspaces.
-        
+
         :returns:
           Long-running operation waiter for :class:`Workspace`.
           See :method:wait_get_workspace_running for more details.
         """
         body = {}
-        if aws_region is not None: body['aws_region'] = aws_region
-        if credentials_id is not None: body['credentials_id'] = credentials_id
-        if custom_tags is not None: body['custom_tags'] = custom_tags
+        if aws_region is not None:
+            body["aws_region"] = aws_region
+        if credentials_id is not None:
+            body["credentials_id"] = credentials_id
+        if custom_tags is not None:
+            body["custom_tags"] = custom_tags
         if managed_services_customer_managed_key_id is not None:
-            body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id
+            body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id
         if network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = network_connectivity_config_id
-        if network_id is not None: body['network_id'] = network_id
+            body["network_connectivity_config_id"] = network_connectivity_config_id
+        if network_id is not None:
+            body["network_id"] = network_id
         if private_access_settings_id is not None:
-            body['private_access_settings_id'] = private_access_settings_id
-        if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
+            body["private_access_settings_id"] = private_access_settings_id
+        if storage_configuration_id is not None:
+            body["storage_configuration_id"] = storage_configuration_id
         if storage_customer_managed_key_id is not None:
-            body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('PATCH',
-                                   f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_workspace_running,
-                    response=UpdateResponse.from_dict(op_response),
-                    workspace_id=workspace_id)
+            body["storage_customer_managed_key_id"] = storage_customer_managed_key_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_workspace_running,
+            response=UpdateResponse.from_dict(op_response),
+            workspace_id=workspace_id,
+        )
 
     def update_and_wait(
         self,
@@ -3270,14 +3779,17 @@ def update_and_wait(
         private_access_settings_id: Optional[str] = None,
         storage_configuration_id: Optional[str] = None,
         storage_customer_managed_key_id: Optional[str] = None,
-        timeout=timedelta(minutes=20)) -> Workspace:
-        return self.update(aws_region=aws_region,
-                           credentials_id=credentials_id,
-                           custom_tags=custom_tags,
-                           managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
-                           network_connectivity_config_id=network_connectivity_config_id,
-                           network_id=network_id,
-                           private_access_settings_id=private_access_settings_id,
-                           storage_configuration_id=storage_configuration_id,
-                           storage_customer_managed_key_id=storage_customer_managed_key_id,
-                           workspace_id=workspace_id).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> Workspace:
+        return self.update(
+            aws_region=aws_region,
+            credentials_id=credentials_id,
+            custom_tags=custom_tags,
+            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
+            network_connectivity_config_id=network_connectivity_config_id,
+            network_id=network_id,
+            private_access_settings_id=private_access_settings_id,
+            storage_configuration_id=storage_configuration_id,
+            storage_customer_managed_key_id=storage_customer_managed_key_id,
+            workspace_id=workspace_id,
+        ).result(timeout=timeout)
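# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of update_and_wait() above: swap a running workspace's credential
# configuration and block until the workspace reports RUNNING again. The IDs
# are placeholders and `a.workspaces` is an assumed accessor.
from datetime import timedelta
from databricks.sdk import AccountClient

a = AccountClient()
a.workspaces.update_and_wait(
    workspace_id=1234567890,
    credentials_id="<new-credentials-id>",
    timeout=timedelta(minutes=30),
)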
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 938445863..6a8013f68 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -15,7 +15,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -35,24 +35,28 @@ class Ai21LabsConfig:
     def as_dict(self) -> dict:
         """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key
+        if self.ai21labs_api_key is not None:
+            body["ai21labs_api_key"] = self.ai21labs_api_key
         if self.ai21labs_api_key_plaintext is not None:
-            body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
+            body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key
+        if self.ai21labs_api_key is not None:
+            body["ai21labs_api_key"] = self.ai21labs_api_key
         if self.ai21labs_api_key_plaintext is not None:
-            body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
+            body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig:
         """Deserializes the Ai21LabsConfig from a dictionary."""
-        return cls(ai21labs_api_key=d.get('ai21labs_api_key', None),
-                   ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None))
+        return cls(
+            ai21labs_api_key=d.get("ai21labs_api_key", None),
+            ai21labs_api_key_plaintext=d.get("ai21labs_api_key_plaintext", None),
+        )
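# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch of the as_dict()/from_dict() round trip used throughout this module;
# the secret reference string is only a placeholder.
from databricks.sdk.service.serving import Ai21LabsConfig

payload = {"ai21labs_api_key": "{{secrets/my_scope/ai21labs_api_key}}"}
cfg = Ai21LabsConfig.from_dict(payload)
assert cfg.as_dict() == payload  # None-valued fields are omitted on the way out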
 
 
 @dataclass
@@ -75,29 +79,38 @@ class AiGatewayConfig:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        if self.guardrails:
+            body["guardrails"] = self.guardrails.as_dict()
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config.as_dict()
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        if self.guardrails:
+            body["guardrails"] = self.guardrails
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig:
         """Deserializes the AiGatewayConfig from a dictionary."""
-        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
-                   inference_table_config=_from_dict(d, 'inference_table_config',
-                                                     AiGatewayInferenceTableConfig),
-                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
-                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+        return cls(
+            guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails),
+            inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig),
+            rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit),
+            usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig),
+        )
 
 
 @dataclass
@@ -119,28 +132,38 @@ class AiGatewayGuardrailParameters:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailParameters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.invalid_keywords: body['invalid_keywords'] = [v for v in self.invalid_keywords]
-        if self.pii: body['pii'] = self.pii.as_dict()
-        if self.safety is not None: body['safety'] = self.safety
-        if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics]
+        if self.invalid_keywords:
+            body["invalid_keywords"] = [v for v in self.invalid_keywords]
+        if self.pii:
+            body["pii"] = self.pii.as_dict()
+        if self.safety is not None:
+            body["safety"] = self.safety
+        if self.valid_topics:
+            body["valid_topics"] = [v for v in self.valid_topics]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords
-        if self.pii: body['pii'] = self.pii
-        if self.safety is not None: body['safety'] = self.safety
-        if self.valid_topics: body['valid_topics'] = self.valid_topics
+        if self.invalid_keywords:
+            body["invalid_keywords"] = self.invalid_keywords
+        if self.pii:
+            body["pii"] = self.pii
+        if self.safety is not None:
+            body["safety"] = self.safety
+        if self.valid_topics:
+            body["valid_topics"] = self.valid_topics
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
         """Deserializes the AiGatewayGuardrailParameters from a dictionary."""
-        return cls(invalid_keywords=d.get('invalid_keywords', None),
-                   pii=_from_dict(d, 'pii', AiGatewayGuardrailPiiBehavior),
-                   safety=d.get('safety', None),
-                   valid_topics=d.get('valid_topics', None))
+        return cls(
+            invalid_keywords=d.get("invalid_keywords", None),
+            pii=_from_dict(d, "pii", AiGatewayGuardrailPiiBehavior),
+            safety=d.get("safety", None),
+            valid_topics=d.get("valid_topics", None),
+        )
 
 
 @dataclass
@@ -151,25 +174,27 @@ class AiGatewayGuardrailPiiBehavior:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.behavior is not None: body['behavior'] = self.behavior.value
+        if self.behavior is not None:
+            body["behavior"] = self.behavior.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.behavior is not None: body['behavior'] = self.behavior
+        if self.behavior is not None:
+            body["behavior"] = self.behavior
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
         """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary."""
-        return cls(behavior=_enum(d, 'behavior', AiGatewayGuardrailPiiBehaviorBehavior))
+        return cls(behavior=_enum(d, "behavior", AiGatewayGuardrailPiiBehaviorBehavior))
 
 
 class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
 
-    BLOCK = 'BLOCK'
-    NONE = 'NONE'
+    BLOCK = "BLOCK"
+    NONE = "NONE"
 
 
 @dataclass
@@ -183,22 +208,28 @@ class AiGatewayGuardrails:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayGuardrails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.input: body['input'] = self.input.as_dict()
-        if self.output: body['output'] = self.output.as_dict()
+        if self.input:
+            body["input"] = self.input.as_dict()
+        if self.output:
+            body["output"] = self.output.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.input: body['input'] = self.input
-        if self.output: body['output'] = self.output
+        if self.input:
+            body["input"] = self.input
+        if self.output:
+            body["output"] = self.output
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails:
         """Deserializes the AiGatewayGuardrails from a dictionary."""
-        return cls(input=_from_dict(d, 'input', AiGatewayGuardrailParameters),
-                   output=_from_dict(d, 'output', AiGatewayGuardrailParameters))
+        return cls(
+            input=_from_dict(d, "input", AiGatewayGuardrailParameters),
+            output=_from_dict(d, "output", AiGatewayGuardrailParameters),
+        )
 
 
 @dataclass
@@ -221,28 +252,38 @@ class AiGatewayInferenceTableConfig:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayInferenceTableConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig:
         """Deserializes the AiGatewayInferenceTableConfig from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   enabled=d.get('enabled', None),
-                   schema_name=d.get('schema_name', None),
-                   table_name_prefix=d.get('table_name_prefix', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            enabled=d.get("enabled", None),
+            schema_name=d.get("schema_name", None),
+            table_name_prefix=d.get("table_name_prefix", None),
+        )
 
 
 @dataclass
@@ -260,36 +301,44 @@ class AiGatewayRateLimit:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.calls is not None: body['calls'] = self.calls
-        if self.key is not None: body['key'] = self.key.value
-        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
+        if self.calls is not None:
+            body["calls"] = self.calls
+        if self.key is not None:
+            body["key"] = self.key.value
+        if self.renewal_period is not None:
+            body["renewal_period"] = self.renewal_period.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayRateLimit into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.calls is not None: body['calls'] = self.calls
-        if self.key is not None: body['key'] = self.key
-        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        if self.calls is not None:
+            body["calls"] = self.calls
+        if self.key is not None:
+            body["key"] = self.key
+        if self.renewal_period is not None:
+            body["renewal_period"] = self.renewal_period
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
         """Deserializes the AiGatewayRateLimit from a dictionary."""
-        return cls(calls=d.get('calls', None),
-                   key=_enum(d, 'key', AiGatewayRateLimitKey),
-                   renewal_period=_enum(d, 'renewal_period', AiGatewayRateLimitRenewalPeriod))
+        return cls(
+            calls=d.get("calls", None),
+            key=_enum(d, "key", AiGatewayRateLimitKey),
+            renewal_period=_enum(d, "renewal_period", AiGatewayRateLimitRenewalPeriod),
+        )
 
 
 class AiGatewayRateLimitKey(Enum):
 
-    ENDPOINT = 'endpoint'
-    USER = 'user'
+    ENDPOINT = "endpoint"
+    USER = "user"
 
 
 class AiGatewayRateLimitRenewalPeriod(Enum):
 
-    MINUTE = 'minute'
+    MINUTE = "minute"
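# --- Editorial example (not part of the Black-formatting patch) ---
# A sketch showing how the enum members above serialize: as_dict() emits the
# wire value of each enum via .value.
from databricks.sdk.service.serving import (
    AiGatewayRateLimit,
    AiGatewayRateLimitKey,
    AiGatewayRateLimitRenewalPeriod,
)

limit = AiGatewayRateLimit(
    calls=100,
    key=AiGatewayRateLimitKey.USER,
    renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
)
assert limit.as_dict() == {"calls": 100, "key": "user", "renewal_period": "minute"}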
 
 
 @dataclass
@@ -300,19 +349,21 @@ class AiGatewayUsageTrackingConfig:
     def as_dict(self) -> dict:
         """Serializes the AiGatewayUsageTrackingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig:
         """Deserializes the AiGatewayUsageTrackingConfig from a dictionary."""
-        return cls(enabled=d.get('enabled', None))
+        return cls(enabled=d.get("enabled", None))
 
 
 @dataclass
@@ -351,46 +402,56 @@ class AmazonBedrockConfig:
     def as_dict(self) -> dict:
         """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
+        if self.aws_access_key_id is not None:
+            body["aws_access_key_id"] = self.aws_access_key_id
         if self.aws_access_key_id_plaintext is not None:
-            body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
+            body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.aws_secret_access_key is not None:
+            body["aws_secret_access_key"] = self.aws_secret_access_key
         if self.aws_secret_access_key_plaintext is not None:
-            body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext
-        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
+            body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext
+        if self.bedrock_provider is not None:
+            body["bedrock_provider"] = self.bedrock_provider.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
+        if self.aws_access_key_id is not None:
+            body["aws_access_key_id"] = self.aws_access_key_id
         if self.aws_access_key_id_plaintext is not None:
-            body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
+            body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.aws_secret_access_key is not None:
+            body["aws_secret_access_key"] = self.aws_secret_access_key
         if self.aws_secret_access_key_plaintext is not None:
-            body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext
-        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider
+            body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext
+        if self.bedrock_provider is not None:
+            body["bedrock_provider"] = self.bedrock_provider
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
         """Deserializes the AmazonBedrockConfig from a dictionary."""
-        return cls(aws_access_key_id=d.get('aws_access_key_id', None),
-                   aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None),
-                   aws_region=d.get('aws_region', None),
-                   aws_secret_access_key=d.get('aws_secret_access_key', None),
-                   aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None),
-                   bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider))
+        return cls(
+            aws_access_key_id=d.get("aws_access_key_id", None),
+            aws_access_key_id_plaintext=d.get("aws_access_key_id_plaintext", None),
+            aws_region=d.get("aws_region", None),
+            aws_secret_access_key=d.get("aws_secret_access_key", None),
+            aws_secret_access_key_plaintext=d.get("aws_secret_access_key_plaintext", None),
+            bedrock_provider=_enum(d, "bedrock_provider", AmazonBedrockConfigBedrockProvider),
+        )
 
 
 class AmazonBedrockConfigBedrockProvider(Enum):
 
-    AI21LABS = 'ai21labs'
-    AMAZON = 'amazon'
-    ANTHROPIC = 'anthropic'
-    COHERE = 'cohere'
+    AI21LABS = "ai21labs"
+    AMAZON = "amazon"
+    ANTHROPIC = "anthropic"
+    COHERE = "cohere"
 
 
 @dataclass
@@ -408,24 +469,28 @@ class AnthropicConfig:
     def as_dict(self) -> dict:
         """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.anthropic_api_key is not None:
+            body["anthropic_api_key"] = self.anthropic_api_key
         if self.anthropic_api_key_plaintext is not None:
-            body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
+            body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.anthropic_api_key is not None:
+            body["anthropic_api_key"] = self.anthropic_api_key
         if self.anthropic_api_key_plaintext is not None:
-            body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
+            body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig:
         """Deserializes the AnthropicConfig from a dictionary."""
-        return cls(anthropic_api_key=d.get('anthropic_api_key', None),
-                   anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None))
+        return cls(
+            anthropic_api_key=d.get("anthropic_api_key", None),
+            anthropic_api_key_plaintext=d.get("anthropic_api_key_plaintext", None),
+        )
 
 
 @dataclass
@@ -448,28 +513,38 @@ class AutoCaptureConfigInput:
     def as_dict(self) -> dict:
         """Serializes the AutoCaptureConfigInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput:
         """Deserializes the AutoCaptureConfigInput from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   enabled=d.get('enabled', None),
-                   schema_name=d.get('schema_name', None),
-                   table_name_prefix=d.get('table_name_prefix', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            enabled=d.get("enabled", None),
+            schema_name=d.get("schema_name", None),
+            table_name_prefix=d.get("table_name_prefix", None),
+        )
 
 
 @dataclass
@@ -494,31 +569,43 @@ class AutoCaptureConfigOutput:
     def as_dict(self) -> dict:
         """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.state: body['state'] = self.state.as_dict()
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.state: body['state'] = self.state
-        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.state:
+            body["state"] = self.state
+        if self.table_name_prefix is not None:
+            body["table_name_prefix"] = self.table_name_prefix
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigOutput:
         """Deserializes the AutoCaptureConfigOutput from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   enabled=d.get('enabled', None),
-                   schema_name=d.get('schema_name', None),
-                   state=_from_dict(d, 'state', AutoCaptureState),
-                   table_name_prefix=d.get('table_name_prefix', None))
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            enabled=d.get("enabled", None),
+            schema_name=d.get("schema_name", None),
+            state=_from_dict(d, "state", AutoCaptureState),
+            table_name_prefix=d.get("table_name_prefix", None),
+        )
 
 
 @dataclass
@@ -528,19 +615,21 @@ class AutoCaptureState:
     def as_dict(self) -> dict:
         """Serializes the AutoCaptureState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.payload_table: body['payload_table'] = self.payload_table.as_dict()
+        if self.payload_table:
+            body["payload_table"] = self.payload_table.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.payload_table: body['payload_table'] = self.payload_table
+        if self.payload_table:
+            body["payload_table"] = self.payload_table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureState:
         """Deserializes the AutoCaptureState from a dictionary."""
-        return cls(payload_table=_from_dict(d, 'payload_table', PayloadTable))
+        return cls(payload_table=_from_dict(d, "payload_table", PayloadTable))
 
 
 @dataclass
@@ -551,19 +640,21 @@ class BuildLogsResponse:
     def as_dict(self) -> dict:
         """Serializes the BuildLogsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.logs is not None: body['logs'] = self.logs
+        if self.logs is not None:
+            body["logs"] = self.logs
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.logs is not None: body['logs'] = self.logs
+        if self.logs is not None:
+            body["logs"] = self.logs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BuildLogsResponse:
         """Deserializes the BuildLogsResponse from a dictionary."""
-        return cls(logs=d.get('logs', None))
+        return cls(logs=d.get("logs", None))
 
 
 @dataclass
@@ -577,29 +668,36 @@ class ChatMessage:
     def as_dict(self) -> dict:
         """Serializes the ChatMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.role is not None: body['role'] = self.role.value
+        if self.content is not None:
+            body["content"] = self.content
+        if self.role is not None:
+            body["role"] = self.role.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ChatMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.role is not None: body['role'] = self.role
+        if self.content is not None:
+            body["content"] = self.content
+        if self.role is not None:
+            body["role"] = self.role
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChatMessage:
         """Deserializes the ChatMessage from a dictionary."""
-        return cls(content=d.get('content', None), role=_enum(d, 'role', ChatMessageRole))
+        return cls(
+            content=d.get("content", None),
+            role=_enum(d, "role", ChatMessageRole),
+        )
 
 
 class ChatMessageRole(Enum):
     """The role of the message. One of [system, user, assistant]."""
 
-    ASSISTANT = 'assistant'
-    SYSTEM = 'system'
-    USER = 'user'
+    ASSISTANT = "assistant"
+    SYSTEM = "system"
+    USER = "user"
 
 
 @dataclass
@@ -621,27 +719,33 @@ class CohereConfig:
     def as_dict(self) -> dict:
         """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base
-        if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key
+        if self.cohere_api_base is not None:
+            body["cohere_api_base"] = self.cohere_api_base
+        if self.cohere_api_key is not None:
+            body["cohere_api_key"] = self.cohere_api_key
         if self.cohere_api_key_plaintext is not None:
-            body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
+            body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CohereConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base
-        if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key
+        if self.cohere_api_base is not None:
+            body["cohere_api_base"] = self.cohere_api_base
+        if self.cohere_api_key is not None:
+            body["cohere_api_key"] = self.cohere_api_key
         if self.cohere_api_key_plaintext is not None:
-            body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
+            body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CohereConfig:
         """Deserializes the CohereConfig from a dictionary."""
-        return cls(cohere_api_base=d.get('cohere_api_base', None),
-                   cohere_api_key=d.get('cohere_api_key', None),
-                   cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None))
+        return cls(
+            cohere_api_base=d.get("cohere_api_base", None),
+            cohere_api_key=d.get("cohere_api_key", None),
+            cohere_api_key_plaintext=d.get("cohere_api_key_plaintext", None),
+        )
 
 
 @dataclass
@@ -670,34 +774,48 @@ class CreateServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
-        if self.config: body['config'] = self.config.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
-        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway.as_dict()
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
+        if self.route_optimized is not None:
+            body["route_optimized"] = self.route_optimized
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
-        if self.config: body['config'] = self.config
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
-        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
-        if self.tags: body['tags'] = self.tags
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway
+        if self.config:
+            body["config"] = self.config
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
+        if self.route_optimized is not None:
+            body["route_optimized"] = self.route_optimized
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
         """Deserializes the CreateServingEndpoint from a dictionary."""
-        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
-                   config=_from_dict(d, 'config', EndpointCoreConfigInput),
-                   name=d.get('name', None),
-                   rate_limits=_repeated_dict(d, 'rate_limits', RateLimit),
-                   route_optimized=d.get('route_optimized', None),
-                   tags=_repeated_dict(d, 'tags', EndpointTag))
+        return cls(
+            ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig),
+            config=_from_dict(d, "config", EndpointCoreConfigInput),
+            name=d.get("name", None),
+            rate_limits=_repeated_dict(d, "rate_limits", RateLimit),
+            route_optimized=d.get("route_optimized", None),
+            tags=_repeated_dict(d, "tags", EndpointTag),
+        )
 
 
 @dataclass
@@ -713,22 +831,28 @@ class DataPlaneInfo:
     def as_dict(self) -> dict:
         """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        if self.authorization_details is not None:
+            body["authorization_details"] = self.authorization_details
+        if self.endpoint_url is not None:
+            body["endpoint_url"] = self.endpoint_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        if self.authorization_details is not None:
+            body["authorization_details"] = self.authorization_details
+        if self.endpoint_url is not None:
+            body["endpoint_url"] = self.endpoint_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
         """Deserializes the DataPlaneInfo from a dictionary."""
-        return cls(authorization_details=d.get('authorization_details', None),
-                   endpoint_url=d.get('endpoint_url', None))
+        return cls(
+            authorization_details=d.get("authorization_details", None),
+            endpoint_url=d.get("endpoint_url", None),
+        )
 
 
 @dataclass
@@ -754,29 +878,33 @@ class DatabricksModelServingConfig:
     def as_dict(self) -> dict:
         """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token
+        if self.databricks_api_token is not None:
+            body["databricks_api_token"] = self.databricks_api_token
         if self.databricks_api_token_plaintext is not None:
-            body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext
+            body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext
         if self.databricks_workspace_url is not None:
-            body['databricks_workspace_url'] = self.databricks_workspace_url
+            body["databricks_workspace_url"] = self.databricks_workspace_url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token
+        if self.databricks_api_token is not None:
+            body["databricks_api_token"] = self.databricks_api_token
         if self.databricks_api_token_plaintext is not None:
-            body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext
+            body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext
         if self.databricks_workspace_url is not None:
-            body['databricks_workspace_url'] = self.databricks_workspace_url
+            body["databricks_workspace_url"] = self.databricks_workspace_url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig:
         """Deserializes the DatabricksModelServingConfig from a dictionary."""
-        return cls(databricks_api_token=d.get('databricks_api_token', None),
-                   databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None),
-                   databricks_workspace_url=d.get('databricks_workspace_url', None))
+        return cls(
+            databricks_api_token=d.get("databricks_api_token", None),
+            databricks_api_token_plaintext=d.get("databricks_api_token_plaintext", None),
+            databricks_workspace_url=d.get("databricks_workspace_url", None),
+        )
 
 
 @dataclass
@@ -790,23 +918,33 @@ class DataframeSplitInput:
     def as_dict(self) -> dict:
         """Serializes the DataframeSplitInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v for v in self.columns]
-        if self.data: body['data'] = [v for v in self.data]
-        if self.index: body['index'] = [v for v in self.index]
+        if self.columns:
+            body["columns"] = [v for v in self.columns]
+        if self.data:
+            body["data"] = [v for v in self.data]
+        if self.index:
+            body["index"] = [v for v in self.index]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
-        if self.data: body['data'] = self.data
-        if self.index: body['index'] = self.index
+        if self.columns:
+            body["columns"] = self.columns
+        if self.data:
+            body["data"] = self.data
+        if self.index:
+            body["index"] = self.index
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataframeSplitInput:
         """Deserializes the DataframeSplitInput from a dictionary."""
-        return cls(columns=d.get('columns', None), data=d.get('data', None), index=d.get('index', None))
+        return cls(
+            columns=d.get("columns", None),
+            data=d.get("data", None),
+            index=d.get("index", None),
+        )
 
 
 @dataclass
@@ -841,31 +979,39 @@ class EmbeddingsV1ResponseEmbeddingElement:
     def as_dict(self) -> dict:
         """Serializes the EmbeddingsV1ResponseEmbeddingElement into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.embedding: body['embedding'] = [v for v in self.embedding]
-        if self.index is not None: body['index'] = self.index
-        if self.object is not None: body['object'] = self.object.value
+        if self.embedding:
+            body["embedding"] = [v for v in self.embedding]
+        if self.index is not None:
+            body["index"] = self.index
+        if self.object is not None:
+            body["object"] = self.object.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.embedding: body['embedding'] = self.embedding
-        if self.index is not None: body['index'] = self.index
-        if self.object is not None: body['object'] = self.object
+        if self.embedding:
+            body["embedding"] = self.embedding
+        if self.index is not None:
+            body["index"] = self.index
+        if self.object is not None:
+            body["object"] = self.object
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingsV1ResponseEmbeddingElement:
         """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary."""
-        return cls(embedding=d.get('embedding', None),
-                   index=d.get('index', None),
-                   object=_enum(d, 'object', EmbeddingsV1ResponseEmbeddingElementObject))
+        return cls(
+            embedding=d.get("embedding", None),
+            index=d.get("index", None),
+            object=_enum(d, "object", EmbeddingsV1ResponseEmbeddingElementObject),
+        )
 
 
 class EmbeddingsV1ResponseEmbeddingElementObject(Enum):
     """This will always be 'embedding'."""
 
-    EMBEDDING = 'embedding'
+    EMBEDDING = "embedding"
 
 
 @dataclass
@@ -892,31 +1038,43 @@ class EndpointCoreConfigInput:
     def as_dict(self) -> dict:
         """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
-        if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
-        if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.served_entities:
+            body["served_entities"] = [v.as_dict() for v in self.served_entities]
+        if self.served_models:
+            body["served_models"] = [v.as_dict() for v in self.served_models]
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
-        if self.name is not None: body['name'] = self.name
-        if self.served_entities: body['served_entities'] = self.served_entities
-        if self.served_models: body['served_models'] = self.served_models
-        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config
+        if self.name is not None:
+            body["name"] = self.name
+        if self.served_entities:
+            body["served_entities"] = self.served_entities
+        if self.served_models:
+            body["served_models"] = self.served_models
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput:
         """Deserializes the EndpointCoreConfigInput from a dictionary."""
-        return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigInput),
-                   name=d.get('name', None),
-                   served_entities=_repeated_dict(d, 'served_entities', ServedEntityInput),
-                   served_models=_repeated_dict(d, 'served_models', ServedModelInput),
-                   traffic_config=_from_dict(d, 'traffic_config', TrafficConfig))
+        return cls(
+            auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigInput),
+            name=d.get("name", None),
+            served_entities=_repeated_dict(d, "served_entities", ServedEntityInput),
+            served_models=_repeated_dict(d, "served_models", ServedModelInput),
+            traffic_config=_from_dict(d, "traffic_config", TrafficConfig),
+        )
 
 
 @dataclass
@@ -943,31 +1101,43 @@ class EndpointCoreConfigOutput:
     def as_dict(self) -> dict:
         """Serializes the EndpointCoreConfigOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict()
-        if self.config_version is not None: body['config_version'] = self.config_version
-        if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
-        if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
-        if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config.as_dict()
+        if self.config_version is not None:
+            body["config_version"] = self.config_version
+        if self.served_entities:
+            body["served_entities"] = [v.as_dict() for v in self.served_entities]
+        if self.served_models:
+            body["served_models"] = [v.as_dict() for v in self.served_models]
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
-        if self.config_version is not None: body['config_version'] = self.config_version
-        if self.served_entities: body['served_entities'] = self.served_entities
-        if self.served_models: body['served_models'] = self.served_models
-        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config
+        if self.config_version is not None:
+            body["config_version"] = self.config_version
+        if self.served_entities:
+            body["served_entities"] = self.served_entities
+        if self.served_models:
+            body["served_models"] = self.served_models
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigOutput:
         """Deserializes the EndpointCoreConfigOutput from a dictionary."""
-        return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput),
-                   config_version=d.get('config_version', None),
-                   served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput),
-                   served_models=_repeated_dict(d, 'served_models', ServedModelOutput),
-                   traffic_config=_from_dict(d, 'traffic_config', TrafficConfig))
+        return cls(
+            auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput),
+            config_version=d.get("config_version", None),
+            served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput),
+            served_models=_repeated_dict(d, "served_models", ServedModelOutput),
+            traffic_config=_from_dict(d, "traffic_config", TrafficConfig),
+        )
 
 
 @dataclass
@@ -982,22 +1152,28 @@ class EndpointCoreConfigSummary:
     def as_dict(self) -> dict:
         """Serializes the EndpointCoreConfigSummary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
-        if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
+        if self.served_entities:
+            body["served_entities"] = [v.as_dict() for v in self.served_entities]
+        if self.served_models:
+            body["served_models"] = [v.as_dict() for v in self.served_models]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.served_entities: body['served_entities'] = self.served_entities
-        if self.served_models: body['served_models'] = self.served_models
+        if self.served_entities:
+            body["served_entities"] = self.served_entities
+        if self.served_models:
+            body["served_models"] = self.served_models
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary:
         """Deserializes the EndpointCoreConfigSummary from a dictionary."""
-        return cls(served_entities=_repeated_dict(d, 'served_entities', ServedEntitySpec),
-                   served_models=_repeated_dict(d, 'served_models', ServedModelSpec))
+        return cls(
+            served_entities=_repeated_dict(d, "served_entities", ServedEntitySpec),
+            served_models=_repeated_dict(d, "served_models", ServedModelSpec),
+        )
 
 
 @dataclass
@@ -1027,34 +1203,48 @@ class EndpointPendingConfig:
     def as_dict(self) -> dict:
         """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict()
-        if self.config_version is not None: body['config_version'] = self.config_version
-        if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
-        if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config.as_dict()
+        if self.config_version is not None:
+            body["config_version"] = self.config_version
+        if self.served_entities:
+            body["served_entities"] = [v.as_dict() for v in self.served_entities]
+        if self.served_models:
+            body["served_models"] = [v.as_dict() for v in self.served_models]
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
-        if self.config_version is not None: body['config_version'] = self.config_version
-        if self.served_entities: body['served_entities'] = self.served_entities
-        if self.served_models: body['served_models'] = self.served_models
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        if self.auto_capture_config:
+            body["auto_capture_config"] = self.auto_capture_config
+        if self.config_version is not None:
+            body["config_version"] = self.config_version
+        if self.served_entities:
+            body["served_entities"] = self.served_entities
+        if self.served_models:
+            body["served_models"] = self.served_models
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.traffic_config:
+            body["traffic_config"] = self.traffic_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig:
         """Deserializes the EndpointPendingConfig from a dictionary."""
-        return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput),
-                   config_version=d.get('config_version', None),
-                   served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput),
-                   served_models=_repeated_dict(d, 'served_models', ServedModelOutput),
-                   start_time=d.get('start_time', None),
-                   traffic_config=_from_dict(d, 'traffic_config', TrafficConfig))
+        return cls(
+            auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput),
+            config_version=d.get("config_version", None),
+            served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput),
+            served_models=_repeated_dict(d, "served_models", ServedModelOutput),
+            start_time=d.get("start_time", None),
+            traffic_config=_from_dict(d, "traffic_config", TrafficConfig),
+        )
 
 
 @dataclass
@@ -1073,36 +1263,42 @@ class EndpointState:
     def as_dict(self) -> dict:
         """Serializes the EndpointState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config_update is not None: body['config_update'] = self.config_update.value
-        if self.ready is not None: body['ready'] = self.ready.value
+        if self.config_update is not None:
+            body["config_update"] = self.config_update.value
+        if self.ready is not None:
+            body["ready"] = self.ready.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config_update is not None: body['config_update'] = self.config_update
-        if self.ready is not None: body['ready'] = self.ready
+        if self.config_update is not None:
+            body["config_update"] = self.config_update
+        if self.ready is not None:
+            body["ready"] = self.ready
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointState:
         """Deserializes the EndpointState from a dictionary."""
-        return cls(config_update=_enum(d, 'config_update', EndpointStateConfigUpdate),
-                   ready=_enum(d, 'ready', EndpointStateReady))
+        return cls(
+            config_update=_enum(d, "config_update", EndpointStateConfigUpdate),
+            ready=_enum(d, "ready", EndpointStateReady),
+        )
 
 
 class EndpointStateConfigUpdate(Enum):
 
-    IN_PROGRESS = 'IN_PROGRESS'
-    NOT_UPDATING = 'NOT_UPDATING'
-    UPDATE_CANCELED = 'UPDATE_CANCELED'
-    UPDATE_FAILED = 'UPDATE_FAILED'
+    IN_PROGRESS = "IN_PROGRESS"
+    NOT_UPDATING = "NOT_UPDATING"
+    UPDATE_CANCELED = "UPDATE_CANCELED"
+    UPDATE_FAILED = "UPDATE_FAILED"
 
 
 class EndpointStateReady(Enum):
 
-    NOT_READY = 'NOT_READY'
-    READY = 'READY'
+    NOT_READY = "NOT_READY"
+    READY = "READY"
 
 
 @dataclass
@@ -1116,21 +1312,25 @@ class EndpointTag:
     def as_dict(self) -> dict:
         """Serializes the EndpointTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         """Deserializes the EndpointTag from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -1140,19 +1340,21 @@ class EndpointTags:
     def as_dict(self) -> dict:
         """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.tags: body['tags'] = self.tags
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
         """Deserializes the EndpointTags from a dictionary."""
-        return cls(tags=_repeated_dict(d, 'tags', EndpointTag))
+        return cls(tags=_repeated_dict(d, "tags", EndpointTag))
 
 
 @dataclass
@@ -1162,19 +1364,21 @@ class ExportMetricsResponse:
     def as_dict(self) -> dict:
         """Serializes the ExportMetricsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
         """Deserializes the ExportMetricsResponse from a dictionary."""
-        return cls(contents=d.get('contents', None))
+        return cls(contents=d.get("contents", None))
 
 
 @dataclass
@@ -1203,43 +1407,57 @@ class ExternalFunctionRequest:
     def as_dict(self) -> dict:
         """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.headers is not None: body['headers'] = self.headers
-        if self.json is not None: body['json'] = self.json
-        if self.method is not None: body['method'] = self.method.value
-        if self.params is not None: body['params'] = self.params
-        if self.path is not None: body['path'] = self.path
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.headers is not None:
+            body["headers"] = self.headers
+        if self.json is not None:
+            body["json"] = self.json
+        if self.method is not None:
+            body["method"] = self.method.value
+        if self.params is not None:
+            body["params"] = self.params
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.headers is not None: body['headers'] = self.headers
-        if self.json is not None: body['json'] = self.json
-        if self.method is not None: body['method'] = self.method
-        if self.params is not None: body['params'] = self.params
-        if self.path is not None: body['path'] = self.path
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.headers is not None:
+            body["headers"] = self.headers
+        if self.json is not None:
+            body["json"] = self.json
+        if self.method is not None:
+            body["method"] = self.method
+        if self.params is not None:
+            body["params"] = self.params
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionRequest:
         """Deserializes the ExternalFunctionRequest from a dictionary."""
-        return cls(connection_name=d.get('connection_name', None),
-                   headers=d.get('headers', None),
-                   json=d.get('json', None),
-                   method=_enum(d, 'method', ExternalFunctionRequestHttpMethod),
-                   params=d.get('params', None),
-                   path=d.get('path', None))
+        return cls(
+            connection_name=d.get("connection_name", None),
+            headers=d.get("headers", None),
+            json=d.get("json", None),
+            method=_enum(d, "method", ExternalFunctionRequestHttpMethod),
+            params=d.get("params", None),
+            path=d.get("path", None),
+        )
 
 
 class ExternalFunctionRequestHttpMethod(Enum):
 
-    DELETE = 'DELETE'
-    GET = 'GET'
-    PATCH = 'PATCH'
-    POST = 'POST'
-    PUT = 'PUT'
+    DELETE = "DELETE"
+    GET = "GET"
+    PATCH = "PATCH"
+    POST = "POST"
+    PUT = "PUT"
 
 
 @dataclass
@@ -1282,67 +1500,89 @@ class ExternalModel:
     def as_dict(self) -> dict:
         """Serializes the ExternalModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config.as_dict()
-        if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config.as_dict()
-        if self.anthropic_config: body['anthropic_config'] = self.anthropic_config.as_dict()
-        if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict()
+        if self.ai21labs_config:
+            body["ai21labs_config"] = self.ai21labs_config.as_dict()
+        if self.amazon_bedrock_config:
+            body["amazon_bedrock_config"] = self.amazon_bedrock_config.as_dict()
+        if self.anthropic_config:
+            body["anthropic_config"] = self.anthropic_config.as_dict()
+        if self.cohere_config:
+            body["cohere_config"] = self.cohere_config.as_dict()
         if self.databricks_model_serving_config:
-            body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict()
+            body["databricks_model_serving_config"] = self.databricks_model_serving_config.as_dict()
         if self.google_cloud_vertex_ai_config:
-            body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.openai_config: body['openai_config'] = self.openai_config.as_dict()
-        if self.palm_config: body['palm_config'] = self.palm_config.as_dict()
-        if self.provider is not None: body['provider'] = self.provider.value
-        if self.task is not None: body['task'] = self.task
+            body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.openai_config:
+            body["openai_config"] = self.openai_config.as_dict()
+        if self.palm_config:
+            body["palm_config"] = self.palm_config.as_dict()
+        if self.provider is not None:
+            body["provider"] = self.provider.value
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config
-        if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config
-        if self.anthropic_config: body['anthropic_config'] = self.anthropic_config
-        if self.cohere_config: body['cohere_config'] = self.cohere_config
+        if self.ai21labs_config:
+            body["ai21labs_config"] = self.ai21labs_config
+        if self.amazon_bedrock_config:
+            body["amazon_bedrock_config"] = self.amazon_bedrock_config
+        if self.anthropic_config:
+            body["anthropic_config"] = self.anthropic_config
+        if self.cohere_config:
+            body["cohere_config"] = self.cohere_config
         if self.databricks_model_serving_config:
-            body['databricks_model_serving_config'] = self.databricks_model_serving_config
+            body["databricks_model_serving_config"] = self.databricks_model_serving_config
         if self.google_cloud_vertex_ai_config:
-            body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config
-        if self.name is not None: body['name'] = self.name
-        if self.openai_config: body['openai_config'] = self.openai_config
-        if self.palm_config: body['palm_config'] = self.palm_config
-        if self.provider is not None: body['provider'] = self.provider
-        if self.task is not None: body['task'] = self.task
+            body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config
+        if self.name is not None:
+            body["name"] = self.name
+        if self.openai_config:
+            body["openai_config"] = self.openai_config
+        if self.palm_config:
+            body["palm_config"] = self.palm_config
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
         """Deserializes the ExternalModel from a dictionary."""
-        return cls(ai21labs_config=_from_dict(d, 'ai21labs_config', Ai21LabsConfig),
-                   amazon_bedrock_config=_from_dict(d, 'amazon_bedrock_config', AmazonBedrockConfig),
-                   anthropic_config=_from_dict(d, 'anthropic_config', AnthropicConfig),
-                   cohere_config=_from_dict(d, 'cohere_config', CohereConfig),
-                   databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config',
-                                                              DatabricksModelServingConfig),
-                   google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config',
-                                                            GoogleCloudVertexAiConfig),
-                   name=d.get('name', None),
-                   openai_config=_from_dict(d, 'openai_config', OpenAiConfig),
-                   palm_config=_from_dict(d, 'palm_config', PaLmConfig),
-                   provider=_enum(d, 'provider', ExternalModelProvider),
-                   task=d.get('task', None))
+        return cls(
+            ai21labs_config=_from_dict(d, "ai21labs_config", Ai21LabsConfig),
+            amazon_bedrock_config=_from_dict(d, "amazon_bedrock_config", AmazonBedrockConfig),
+            anthropic_config=_from_dict(d, "anthropic_config", AnthropicConfig),
+            cohere_config=_from_dict(d, "cohere_config", CohereConfig),
+            databricks_model_serving_config=_from_dict(
+                d,
+                "databricks_model_serving_config",
+                DatabricksModelServingConfig,
+            ),
+            google_cloud_vertex_ai_config=_from_dict(d, "google_cloud_vertex_ai_config", GoogleCloudVertexAiConfig),
+            name=d.get("name", None),
+            openai_config=_from_dict(d, "openai_config", OpenAiConfig),
+            palm_config=_from_dict(d, "palm_config", PaLmConfig),
+            provider=_enum(d, "provider", ExternalModelProvider),
+            task=d.get("task", None),
+        )
 
 
 class ExternalModelProvider(Enum):
 
-    AI21LABS = 'ai21labs'
-    AMAZON_BEDROCK = 'amazon-bedrock'
-    ANTHROPIC = 'anthropic'
-    COHERE = 'cohere'
-    DATABRICKS_MODEL_SERVING = 'databricks-model-serving'
-    GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai'
-    OPENAI = 'openai'
-    PALM = 'palm'
+    AI21LABS = "ai21labs"
+    AMAZON_BEDROCK = "amazon-bedrock"
+    ANTHROPIC = "anthropic"
+    COHERE = "cohere"
+    DATABRICKS_MODEL_SERVING = "databricks-model-serving"
+    GOOGLE_CLOUD_VERTEX_AI = "google-cloud-vertex-ai"
+    OPENAI = "openai"
+    PALM = "palm"
 
 
 @dataclass
@@ -1359,25 +1599,33 @@ class ExternalModelUsageElement:
     def as_dict(self) -> dict:
         """Serializes the ExternalModelUsageElement into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens
-        if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens
-        if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
+        if self.completion_tokens is not None:
+            body["completion_tokens"] = self.completion_tokens
+        if self.prompt_tokens is not None:
+            body["prompt_tokens"] = self.prompt_tokens
+        if self.total_tokens is not None:
+            body["total_tokens"] = self.total_tokens
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens
-        if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens
-        if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
+        if self.completion_tokens is not None:
+            body["completion_tokens"] = self.completion_tokens
+        if self.prompt_tokens is not None:
+            body["prompt_tokens"] = self.prompt_tokens
+        if self.total_tokens is not None:
+            body["total_tokens"] = self.total_tokens
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
         """Deserializes the ExternalModelUsageElement from a dictionary."""
-        return cls(completion_tokens=d.get('completion_tokens', None),
-                   prompt_tokens=d.get('prompt_tokens', None),
-                   total_tokens=d.get('total_tokens', None))
+        return cls(
+            completion_tokens=d.get("completion_tokens", None),
+            prompt_tokens=d.get("prompt_tokens", None),
+            total_tokens=d.get("total_tokens", None),
+        )
 
 
 @dataclass
@@ -1396,28 +1644,38 @@ class FoundationModel:
     def as_dict(self) -> dict:
         """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.docs is not None: body['docs'] = self.docs
-        if self.name is not None: body['name'] = self.name
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.docs is not None:
+            body["docs"] = self.docs
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FoundationModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.docs is not None: body['docs'] = self.docs
-        if self.name is not None: body['name'] = self.name
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.docs is not None:
+            body["docs"] = self.docs
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
         """Deserializes the FoundationModel from a dictionary."""
-        return cls(description=d.get('description', None),
-                   display_name=d.get('display_name', None),
-                   docs=d.get('docs', None),
-                   name=d.get('name', None))
+        return cls(
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            docs=d.get("docs", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1427,19 +1685,21 @@ class GetOpenApiResponse:
     def as_dict(self) -> dict:
         """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse:
         """Deserializes the GetOpenApiResponse from a dictionary."""
-        return cls(contents=d.get('contents', None))
+        return cls(contents=d.get("contents", None))
 
 
 @dataclass
@@ -1450,20 +1710,21 @@ class GetServingEndpointPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetServingEndpointPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsResponse:
         """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary."""
-        return cls(
-            permission_levels=_repeated_dict(d, 'permission_levels', ServingEndpointPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", ServingEndpointPermissionsDescription))
 
 
 @dataclass
@@ -1498,28 +1759,38 @@ class GoogleCloudVertexAiConfig:
     def as_dict(self) -> dict:
         """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.private_key is not None: body['private_key'] = self.private_key
-        if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext
-        if self.project_id is not None: body['project_id'] = self.project_id
-        if self.region is not None: body['region'] = self.region
+        if self.private_key is not None:
+            body["private_key"] = self.private_key
+        if self.private_key_plaintext is not None:
+            body["private_key_plaintext"] = self.private_key_plaintext
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.private_key is not None: body['private_key'] = self.private_key
-        if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext
-        if self.project_id is not None: body['project_id'] = self.project_id
-        if self.region is not None: body['region'] = self.region
+        if self.private_key is not None:
+            body["private_key"] = self.private_key
+        if self.private_key_plaintext is not None:
+            body["private_key_plaintext"] = self.private_key_plaintext
+        if self.project_id is not None:
+            body["project_id"] = self.project_id
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
         """Deserializes the GoogleCloudVertexAiConfig from a dictionary."""
-        return cls(private_key=d.get('private_key', None),
-                   private_key_plaintext=d.get('private_key_plaintext', None),
-                   project_id=d.get('project_id', None),
-                   region=d.get('region', None))
+        return cls(
+            private_key=d.get("private_key", None),
+            private_key_plaintext=d.get("private_key_plaintext", None),
+            project_id=d.get("project_id", None),
+            region=d.get("region", None),
+        )
 
 
 @dataclass
@@ -1529,19 +1800,21 @@ class HttpRequestResponse:
     def as_dict(self) -> dict:
         """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.contents: body['contents'] = self.contents
+        if self.contents:
+            body["contents"] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpRequestResponse:
         """Deserializes the HttpRequestResponse from a dictionary."""
-        return cls(contents=d.get('contents', None))
+        return cls(contents=d.get("contents", None))
 
 
 @dataclass
@@ -1552,19 +1825,21 @@ class ListEndpointsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListEndpointsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints]
+        if self.endpoints:
+            body["endpoints"] = [v.as_dict() for v in self.endpoints]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoints: body['endpoints'] = self.endpoints
+        if self.endpoints:
+            body["endpoints"] = self.endpoints
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
         """Deserializes the ListEndpointsResponse from a dictionary."""
-        return cls(endpoints=_repeated_dict(d, 'endpoints', ServingEndpoint))
+        return cls(endpoints=_repeated_dict(d, "endpoints", ServingEndpoint))
 
 
 @dataclass
@@ -1578,19 +1853,21 @@ class ModelDataPlaneInfo:
     def as_dict(self) -> dict:
         """Serializes the ModelDataPlaneInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query_info: body['query_info'] = self.query_info.as_dict()
+        if self.query_info:
+            body["query_info"] = self.query_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query_info: body['query_info'] = self.query_info
+        if self.query_info:
+            body["query_info"] = self.query_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo:
         """Deserializes the ModelDataPlaneInfo from a dictionary."""
-        return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo))
+        return cls(query_info=_from_dict(d, "query_info", DataPlaneInfo))
 
 
 @dataclass
@@ -1652,61 +1929,72 @@ def as_dict(self) -> dict:
         """Serializes the OpenAiConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.microsoft_entra_client_id is not None:
-            body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
+            body["microsoft_entra_client_id"] = self.microsoft_entra_client_id
         if self.microsoft_entra_client_secret is not None:
-            body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+            body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret
         if self.microsoft_entra_client_secret_plaintext is not None:
-            body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext
+            body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext
         if self.microsoft_entra_tenant_id is not None:
-            body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
-        if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
-        if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+            body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id
+        if self.openai_api_base is not None:
+            body["openai_api_base"] = self.openai_api_base
+        if self.openai_api_key is not None:
+            body["openai_api_key"] = self.openai_api_key
         if self.openai_api_key_plaintext is not None:
-            body['openai_api_key_plaintext'] = self.openai_api_key_plaintext
-        if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
-        if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
+            body["openai_api_key_plaintext"] = self.openai_api_key_plaintext
+        if self.openai_api_type is not None:
+            body["openai_api_type"] = self.openai_api_type
+        if self.openai_api_version is not None:
+            body["openai_api_version"] = self.openai_api_version
         if self.openai_deployment_name is not None:
-            body['openai_deployment_name'] = self.openai_deployment_name
-        if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
+            body["openai_deployment_name"] = self.openai_deployment_name
+        if self.openai_organization is not None:
+            body["openai_organization"] = self.openai_organization
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.microsoft_entra_client_id is not None:
-            body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
+            body["microsoft_entra_client_id"] = self.microsoft_entra_client_id
         if self.microsoft_entra_client_secret is not None:
-            body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+            body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret
         if self.microsoft_entra_client_secret_plaintext is not None:
-            body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext
+            body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext
         if self.microsoft_entra_tenant_id is not None:
-            body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
-        if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
-        if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+            body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id
+        if self.openai_api_base is not None:
+            body["openai_api_base"] = self.openai_api_base
+        if self.openai_api_key is not None:
+            body["openai_api_key"] = self.openai_api_key
         if self.openai_api_key_plaintext is not None:
-            body['openai_api_key_plaintext'] = self.openai_api_key_plaintext
-        if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
-        if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
+            body["openai_api_key_plaintext"] = self.openai_api_key_plaintext
+        if self.openai_api_type is not None:
+            body["openai_api_type"] = self.openai_api_type
+        if self.openai_api_version is not None:
+            body["openai_api_version"] = self.openai_api_version
         if self.openai_deployment_name is not None:
-            body['openai_deployment_name'] = self.openai_deployment_name
-        if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
+            body["openai_deployment_name"] = self.openai_deployment_name
+        if self.openai_organization is not None:
+            body["openai_organization"] = self.openai_organization
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
         """Deserializes the OpenAiConfig from a dictionary."""
-        return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None),
-                   microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None),
-                   microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext',
-                                                                 None),
-                   microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None),
-                   openai_api_base=d.get('openai_api_base', None),
-                   openai_api_key=d.get('openai_api_key', None),
-                   openai_api_key_plaintext=d.get('openai_api_key_plaintext', None),
-                   openai_api_type=d.get('openai_api_type', None),
-                   openai_api_version=d.get('openai_api_version', None),
-                   openai_deployment_name=d.get('openai_deployment_name', None),
-                   openai_organization=d.get('openai_organization', None))
+        return cls(
+            microsoft_entra_client_id=d.get("microsoft_entra_client_id", None),
+            microsoft_entra_client_secret=d.get("microsoft_entra_client_secret", None),
+            microsoft_entra_client_secret_plaintext=d.get("microsoft_entra_client_secret_plaintext", None),
+            microsoft_entra_tenant_id=d.get("microsoft_entra_tenant_id", None),
+            openai_api_base=d.get("openai_api_base", None),
+            openai_api_key=d.get("openai_api_key", None),
+            openai_api_key_plaintext=d.get("openai_api_key_plaintext", None),
+            openai_api_type=d.get("openai_api_type", None),
+            openai_api_version=d.get("openai_api_version", None),
+            openai_deployment_name=d.get("openai_deployment_name", None),
+            openai_organization=d.get("openai_organization", None),
+        )
 
 
 @dataclass
@@ -1724,24 +2012,28 @@ class PaLmConfig:
     def as_dict(self) -> dict:
         """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        if self.palm_api_key is not None:
+            body["palm_api_key"] = self.palm_api_key
         if self.palm_api_key_plaintext is not None:
-            body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
+            body["palm_api_key_plaintext"] = self.palm_api_key_plaintext
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        if self.palm_api_key is not None:
+            body["palm_api_key"] = self.palm_api_key
         if self.palm_api_key_plaintext is not None:
-            body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
+            body["palm_api_key_plaintext"] = self.palm_api_key_plaintext
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PaLmConfig:
         """Deserializes the PaLmConfig from a dictionary."""
-        return cls(palm_api_key=d.get('palm_api_key', None),
-                   palm_api_key_plaintext=d.get('palm_api_key_plaintext', None))
+        return cls(
+            palm_api_key=d.get("palm_api_key", None),
+            palm_api_key_plaintext=d.get("palm_api_key_plaintext", None),
+        )
 
 
 @dataclass
@@ -1758,25 +2050,33 @@ class PatchServingEndpointTags:
     def as_dict(self) -> dict:
         """Serializes the PatchServingEndpointTags into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add_tags: body['add_tags'] = [v.as_dict() for v in self.add_tags]
-        if self.delete_tags: body['delete_tags'] = [v for v in self.delete_tags]
-        if self.name is not None: body['name'] = self.name
+        if self.add_tags:
+            body["add_tags"] = [v.as_dict() for v in self.add_tags]
+        if self.delete_tags:
+            body["delete_tags"] = [v for v in self.delete_tags]
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add_tags: body['add_tags'] = self.add_tags
-        if self.delete_tags: body['delete_tags'] = self.delete_tags
-        if self.name is not None: body['name'] = self.name
+        if self.add_tags:
+            body["add_tags"] = self.add_tags
+        if self.delete_tags:
+            body["delete_tags"] = self.delete_tags
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
         """Deserializes the PatchServingEndpointTags from a dictionary."""
-        return cls(add_tags=_repeated_dict(d, 'add_tags', EndpointTag),
-                   delete_tags=d.get('delete_tags', None),
-                   name=d.get('name', None))
+        return cls(
+            add_tags=_repeated_dict(d, "add_tags", EndpointTag),
+            delete_tags=d.get("delete_tags", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1790,25 +2090,33 @@ class PayloadTable:
     def as_dict(self) -> dict:
         """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.name is not None:
+            body["name"] = self.name
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PayloadTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.name is not None:
+            body["name"] = self.name
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
         """Deserializes the PayloadTable from a dictionary."""
-        return cls(name=d.get('name', None),
-                   status=d.get('status', None),
-                   status_message=d.get('status_message', None))
+        return cls(
+            name=d.get("name", None),
+            status=d.get("status", None),
+            status_message=d.get("status_message", None),
+        )
 
 
 @dataclass
@@ -1834,32 +2142,43 @@ class PutAiGatewayRequest:
     def as_dict(self) -> dict:
         """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        if self.guardrails:
+            body["guardrails"] = self.guardrails.as_dict()
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        if self.guardrails:
+            body["guardrails"] = self.guardrails
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayRequest:
         """Deserializes the PutAiGatewayRequest from a dictionary."""
-        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
-                   inference_table_config=_from_dict(d, 'inference_table_config',
-                                                     AiGatewayInferenceTableConfig),
-                   name=d.get('name', None),
-                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
-                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+        return cls(
+            guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails),
+            inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig),
+            name=d.get("name", None),
+            rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit),
+            usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig),
+        )
 
 
 @dataclass
@@ -1882,29 +2201,38 @@ class PutAiGatewayResponse:
     def as_dict(self) -> dict:
         """Serializes the PutAiGatewayResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        if self.guardrails:
+            body["guardrails"] = self.guardrails.as_dict()
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config.as_dict()
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.guardrails: body['guardrails'] = self.guardrails
-        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
-        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        if self.guardrails:
+            body["guardrails"] = self.guardrails
+        if self.inference_table_config:
+            body["inference_table_config"] = self.inference_table_config
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
+        if self.usage_tracking_config:
+            body["usage_tracking_config"] = self.usage_tracking_config
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
         """Deserializes the PutAiGatewayResponse from a dictionary."""
-        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
-                   inference_table_config=_from_dict(d, 'inference_table_config',
-                                                     AiGatewayInferenceTableConfig),
-                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
-                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+        return cls(
+            guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails),
+            inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig),
+            rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit),
+            usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig),
+        )
 
 
 @dataclass
@@ -1918,21 +2246,28 @@ class PutRequest:
     def as_dict(self) -> dict:
         """Serializes the PutRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.name is not None:
+            body["name"] = self.name
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutRequest:
         """Deserializes the PutRequest from a dictionary."""
-        return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit))
+        return cls(
+            name=d.get("name", None),
+            rate_limits=_repeated_dict(d, "rate_limits", RateLimit),
+        )
 
 
 @dataclass
@@ -1943,19 +2278,21 @@ class PutResponse:
     def as_dict(self) -> dict:
         """Serializes the PutResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.rate_limits:
+            body["rate_limits"] = [v.as_dict() for v in self.rate_limits]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.rate_limits:
+            body["rate_limits"] = self.rate_limits
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutResponse:
         """Deserializes the PutResponse from a dictionary."""
-        return cls(rate_limits=_repeated_dict(d, 'rate_limits', RateLimit))
+        return cls(rate_limits=_repeated_dict(d, "rate_limits", RateLimit))
 
 
 @dataclass
@@ -2021,58 +2358,88 @@ class QueryEndpointInput:
     def as_dict(self) -> dict:
         """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataframe_records: body['dataframe_records'] = [v for v in self.dataframe_records]
-        if self.dataframe_split: body['dataframe_split'] = self.dataframe_split.as_dict()
-        if self.extra_params: body['extra_params'] = self.extra_params
-        if self.input: body['input'] = self.input
-        if self.inputs: body['inputs'] = self.inputs
-        if self.instances: body['instances'] = [v for v in self.instances]
-        if self.max_tokens is not None: body['max_tokens'] = self.max_tokens
-        if self.messages: body['messages'] = [v.as_dict() for v in self.messages]
-        if self.n is not None: body['n'] = self.n
-        if self.name is not None: body['name'] = self.name
-        if self.prompt: body['prompt'] = self.prompt
-        if self.stop: body['stop'] = [v for v in self.stop]
-        if self.stream is not None: body['stream'] = self.stream
-        if self.temperature is not None: body['temperature'] = self.temperature
+        if self.dataframe_records:
+            body["dataframe_records"] = [v for v in self.dataframe_records]
+        if self.dataframe_split:
+            body["dataframe_split"] = self.dataframe_split.as_dict()
+        if self.extra_params:
+            body["extra_params"] = self.extra_params
+        if self.input:
+            body["input"] = self.input
+        if self.inputs:
+            body["inputs"] = self.inputs
+        if self.instances:
+            body["instances"] = [v for v in self.instances]
+        if self.max_tokens is not None:
+            body["max_tokens"] = self.max_tokens
+        if self.messages:
+            body["messages"] = [v.as_dict() for v in self.messages]
+        if self.n is not None:
+            body["n"] = self.n
+        if self.name is not None:
+            body["name"] = self.name
+        if self.prompt:
+            body["prompt"] = self.prompt
+        if self.stop:
+            body["stop"] = [v for v in self.stop]
+        if self.stream is not None:
+            body["stream"] = self.stream
+        if self.temperature is not None:
+            body["temperature"] = self.temperature
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataframe_records: body['dataframe_records'] = self.dataframe_records
-        if self.dataframe_split: body['dataframe_split'] = self.dataframe_split
-        if self.extra_params: body['extra_params'] = self.extra_params
-        if self.input: body['input'] = self.input
-        if self.inputs: body['inputs'] = self.inputs
-        if self.instances: body['instances'] = self.instances
-        if self.max_tokens is not None: body['max_tokens'] = self.max_tokens
-        if self.messages: body['messages'] = self.messages
-        if self.n is not None: body['n'] = self.n
-        if self.name is not None: body['name'] = self.name
-        if self.prompt: body['prompt'] = self.prompt
-        if self.stop: body['stop'] = self.stop
-        if self.stream is not None: body['stream'] = self.stream
-        if self.temperature is not None: body['temperature'] = self.temperature
+        if self.dataframe_records:
+            body["dataframe_records"] = self.dataframe_records
+        if self.dataframe_split:
+            body["dataframe_split"] = self.dataframe_split
+        if self.extra_params:
+            body["extra_params"] = self.extra_params
+        if self.input:
+            body["input"] = self.input
+        if self.inputs:
+            body["inputs"] = self.inputs
+        if self.instances:
+            body["instances"] = self.instances
+        if self.max_tokens is not None:
+            body["max_tokens"] = self.max_tokens
+        if self.messages:
+            body["messages"] = self.messages
+        if self.n is not None:
+            body["n"] = self.n
+        if self.name is not None:
+            body["name"] = self.name
+        if self.prompt:
+            body["prompt"] = self.prompt
+        if self.stop:
+            body["stop"] = self.stop
+        if self.stream is not None:
+            body["stream"] = self.stream
+        if self.temperature is not None:
+            body["temperature"] = self.temperature
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEndpointInput:
         """Deserializes the QueryEndpointInput from a dictionary."""
-        return cls(dataframe_records=d.get('dataframe_records', None),
-                   dataframe_split=_from_dict(d, 'dataframe_split', DataframeSplitInput),
-                   extra_params=d.get('extra_params', None),
-                   input=d.get('input', None),
-                   inputs=d.get('inputs', None),
-                   instances=d.get('instances', None),
-                   max_tokens=d.get('max_tokens', None),
-                   messages=_repeated_dict(d, 'messages', ChatMessage),
-                   n=d.get('n', None),
-                   name=d.get('name', None),
-                   prompt=d.get('prompt', None),
-                   stop=d.get('stop', None),
-                   stream=d.get('stream', None),
-                   temperature=d.get('temperature', None))
+        return cls(
+            dataframe_records=d.get("dataframe_records", None),
+            dataframe_split=_from_dict(d, "dataframe_split", DataframeSplitInput),
+            extra_params=d.get("extra_params", None),
+            input=d.get("input", None),
+            inputs=d.get("inputs", None),
+            instances=d.get("instances", None),
+            max_tokens=d.get("max_tokens", None),
+            messages=_repeated_dict(d, "messages", ChatMessage),
+            n=d.get("n", None),
+            name=d.get("name", None),
+            prompt=d.get("prompt", None),
+            stop=d.get("stop", None),
+            stream=d.get("stream", None),
+            temperature=d.get("temperature", None),
+        )
 
 
 @dataclass
@@ -2115,52 +2482,72 @@ class QueryEndpointResponse:
     def as_dict(self) -> dict:
         """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.choices: body['choices'] = [v.as_dict() for v in self.choices]
-        if self.created is not None: body['created'] = self.created
-        if self.data: body['data'] = [v.as_dict() for v in self.data]
-        if self.id is not None: body['id'] = self.id
-        if self.model is not None: body['model'] = self.model
-        if self.object is not None: body['object'] = self.object.value
-        if self.predictions: body['predictions'] = [v for v in self.predictions]
-        if self.served_model_name is not None: body['served-model-name'] = self.served_model_name
-        if self.usage: body['usage'] = self.usage.as_dict()
+        if self.choices:
+            body["choices"] = [v.as_dict() for v in self.choices]
+        if self.created is not None:
+            body["created"] = self.created
+        if self.data:
+            body["data"] = [v.as_dict() for v in self.data]
+        if self.id is not None:
+            body["id"] = self.id
+        if self.model is not None:
+            body["model"] = self.model
+        if self.object is not None:
+            body["object"] = self.object.value
+        if self.predictions:
+            body["predictions"] = [v for v in self.predictions]
+        if self.served_model_name is not None:
+            body["served-model-name"] = self.served_model_name
+        if self.usage:
+            body["usage"] = self.usage.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.choices: body['choices'] = self.choices
-        if self.created is not None: body['created'] = self.created
-        if self.data: body['data'] = self.data
-        if self.id is not None: body['id'] = self.id
-        if self.model is not None: body['model'] = self.model
-        if self.object is not None: body['object'] = self.object
-        if self.predictions: body['predictions'] = self.predictions
-        if self.served_model_name is not None: body['served-model-name'] = self.served_model_name
-        if self.usage: body['usage'] = self.usage
+        if self.choices:
+            body["choices"] = self.choices
+        if self.created is not None:
+            body["created"] = self.created
+        if self.data:
+            body["data"] = self.data
+        if self.id is not None:
+            body["id"] = self.id
+        if self.model is not None:
+            body["model"] = self.model
+        if self.object is not None:
+            body["object"] = self.object
+        if self.predictions:
+            body["predictions"] = self.predictions
+        if self.served_model_name is not None:
+            body["served-model-name"] = self.served_model_name
+        if self.usage:
+            body["usage"] = self.usage
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEndpointResponse:
         """Deserializes the QueryEndpointResponse from a dictionary."""
-        return cls(choices=_repeated_dict(d, 'choices', V1ResponseChoiceElement),
-                   created=d.get('created', None),
-                   data=_repeated_dict(d, 'data', EmbeddingsV1ResponseEmbeddingElement),
-                   id=d.get('id', None),
-                   model=d.get('model', None),
-                   object=_enum(d, 'object', QueryEndpointResponseObject),
-                   predictions=d.get('predictions', None),
-                   served_model_name=d.get('served-model-name', None),
-                   usage=_from_dict(d, 'usage', ExternalModelUsageElement))
+        return cls(
+            choices=_repeated_dict(d, "choices", V1ResponseChoiceElement),
+            created=d.get("created", None),
+            data=_repeated_dict(d, "data", EmbeddingsV1ResponseEmbeddingElement),
+            id=d.get("id", None),
+            model=d.get("model", None),
+            object=_enum(d, "object", QueryEndpointResponseObject),
+            predictions=d.get("predictions", None),
+            served_model_name=d.get("served-model-name", None),
+            usage=_from_dict(d, "usage", ExternalModelUsageElement),
+        )
 
 
 class QueryEndpointResponseObject(Enum):
     """The type of object returned by the __external/foundation model__ serving endpoint, one of
     [text_completion, chat.completion, list (of embeddings)]."""
 
-    CHAT_COMPLETION = 'chat.completion'
-    LIST = 'list'
-    TEXT_COMPLETION = 'text_completion'
+    CHAT_COMPLETION = "chat.completion"
+    LIST = "list"
+    TEXT_COMPLETION = "text_completion"
 
 
 @dataclass
@@ -2178,36 +2565,44 @@ class RateLimit:
     def as_dict(self) -> dict:
         """Serializes the RateLimit into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.calls is not None: body['calls'] = self.calls
-        if self.key is not None: body['key'] = self.key.value
-        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
+        if self.calls is not None:
+            body["calls"] = self.calls
+        if self.key is not None:
+            body["key"] = self.key.value
+        if self.renewal_period is not None:
+            body["renewal_period"] = self.renewal_period.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RateLimit into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.calls is not None: body['calls'] = self.calls
-        if self.key is not None: body['key'] = self.key
-        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        if self.calls is not None:
+            body["calls"] = self.calls
+        if self.key is not None:
+            body["key"] = self.key
+        if self.renewal_period is not None:
+            body["renewal_period"] = self.renewal_period
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RateLimit:
         """Deserializes the RateLimit from a dictionary."""
-        return cls(calls=d.get('calls', None),
-                   key=_enum(d, 'key', RateLimitKey),
-                   renewal_period=_enum(d, 'renewal_period', RateLimitRenewalPeriod))
+        return cls(
+            calls=d.get("calls", None),
+            key=_enum(d, "key", RateLimitKey),
+            renewal_period=_enum(d, "renewal_period", RateLimitRenewalPeriod),
+        )
 
 
 class RateLimitKey(Enum):
 
-    ENDPOINT = 'endpoint'
-    USER = 'user'
+    ENDPOINT = "endpoint"
+    USER = "user"
 
 
 class RateLimitRenewalPeriod(Enum):
 
-    MINUTE = 'minute'
+    MINUTE = "minute"
 
 
 @dataclass
@@ -2222,22 +2617,28 @@ class Route:
     def as_dict(self) -> dict:
         """Serializes the Route into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.served_model_name is not None: body['served_model_name'] = self.served_model_name
-        if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
+        if self.served_model_name is not None:
+            body["served_model_name"] = self.served_model_name
+        if self.traffic_percentage is not None:
+            body["traffic_percentage"] = self.traffic_percentage
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Route into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.served_model_name is not None: body['served_model_name'] = self.served_model_name
-        if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
+        if self.served_model_name is not None:
+            body["served_model_name"] = self.served_model_name
+        if self.traffic_percentage is not None:
+            body["traffic_percentage"] = self.traffic_percentage
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Route:
         """Deserializes the Route from a dictionary."""
-        return cls(served_model_name=d.get('served_model_name', None),
-                   traffic_percentage=d.get('traffic_percentage', None))
+        return cls(
+            served_model_name=d.get("served_model_name", None),
+            traffic_percentage=d.get("traffic_percentage", None),
+        )
 
 
 @dataclass
@@ -2301,53 +2702,73 @@ class ServedEntityInput:
     def as_dict(self) -> dict:
         """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.external_model: body['external_model'] = self.external_model.as_dict()
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.external_model:
+            body["external_model"] = self.external_model.as_dict()
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.external_model: body['external_model'] = self.external_model
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.external_model:
+            body["external_model"] = self.external_model
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput:
         """Deserializes the ServedEntityInput from a dictionary."""
-        return cls(entity_name=d.get('entity_name', None),
-                   entity_version=d.get('entity_version', None),
-                   environment_vars=d.get('environment_vars', None),
-                   external_model=_from_dict(d, 'external_model', ExternalModel),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   max_provisioned_throughput=d.get('max_provisioned_throughput', None),
-                   min_provisioned_throughput=d.get('min_provisioned_throughput', None),
-                   name=d.get('name', None),
-                   scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
-                   workload_size=d.get('workload_size', None),
-                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
+        return cls(
+            entity_name=d.get("entity_name", None),
+            entity_version=d.get("entity_version", None),
+            environment_vars=d.get("environment_vars", None),
+            external_model=_from_dict(d, "external_model", ExternalModel),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            max_provisioned_throughput=d.get("max_provisioned_throughput", None),
+            min_provisioned_throughput=d.get("min_provisioned_throughput", None),
+            name=d.get("name", None),
+            scale_to_zero_enabled=d.get("scale_to_zero_enabled", None),
+            workload_size=d.get("workload_size", None),
+            workload_type=_enum(d, "workload_type", ServingModelWorkloadType),
+        )
 
 
 @dataclass
@@ -2421,65 +2842,93 @@ class ServedEntityOutput:
     def as_dict(self) -> dict:
         """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.external_model: body['external_model'] = self.external_model.as_dict()
-        if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict()
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.external_model:
+            body["external_model"] = self.external_model.as_dict()
+        if self.foundation_model:
+            body["foundation_model"] = self.foundation_model.as_dict()
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.state: body['state'] = self.state.as_dict()
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.external_model: body['external_model'] = self.external_model
-        if self.foundation_model: body['foundation_model'] = self.foundation_model
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.external_model:
+            body["external_model"] = self.external_model
+        if self.foundation_model:
+            body["foundation_model"] = self.foundation_model
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.state: body['state'] = self.state
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.state:
+            body["state"] = self.state
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput:
         """Deserializes the ServedEntityOutput from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   creator=d.get('creator', None),
-                   entity_name=d.get('entity_name', None),
-                   entity_version=d.get('entity_version', None),
-                   environment_vars=d.get('environment_vars', None),
-                   external_model=_from_dict(d, 'external_model', ExternalModel),
-                   foundation_model=_from_dict(d, 'foundation_model', FoundationModel),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   max_provisioned_throughput=d.get('max_provisioned_throughput', None),
-                   min_provisioned_throughput=d.get('min_provisioned_throughput', None),
-                   name=d.get('name', None),
-                   scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
-                   state=_from_dict(d, 'state', ServedModelState),
-                   workload_size=d.get('workload_size', None),
-                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            creator=d.get("creator", None),
+            entity_name=d.get("entity_name", None),
+            entity_version=d.get("entity_version", None),
+            environment_vars=d.get("environment_vars", None),
+            external_model=_from_dict(d, "external_model", ExternalModel),
+            foundation_model=_from_dict(d, "foundation_model", FoundationModel),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            max_provisioned_throughput=d.get("max_provisioned_throughput", None),
+            min_provisioned_throughput=d.get("min_provisioned_throughput", None),
+            name=d.get("name", None),
+            scale_to_zero_enabled=d.get("scale_to_zero_enabled", None),
+            state=_from_dict(d, "state", ServedModelState),
+            workload_size=d.get("workload_size", None),
+            workload_type=_enum(d, "workload_type", ServingModelWorkloadType),
+        )
 
 
 @dataclass
@@ -2499,31 +2948,43 @@ class ServedEntitySpec:
     def as_dict(self) -> dict:
         """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.external_model: body['external_model'] = self.external_model.as_dict()
-        if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict()
-        if self.name is not None: body['name'] = self.name
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.external_model:
+            body["external_model"] = self.external_model.as_dict()
+        if self.foundation_model:
+            body["foundation_model"] = self.foundation_model.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.entity_name is not None: body['entity_name'] = self.entity_name
-        if self.entity_version is not None: body['entity_version'] = self.entity_version
-        if self.external_model: body['external_model'] = self.external_model
-        if self.foundation_model: body['foundation_model'] = self.foundation_model
-        if self.name is not None: body['name'] = self.name
+        if self.entity_name is not None:
+            body["entity_name"] = self.entity_name
+        if self.entity_version is not None:
+            body["entity_version"] = self.entity_version
+        if self.external_model:
+            body["external_model"] = self.external_model
+        if self.foundation_model:
+            body["foundation_model"] = self.foundation_model
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
         """Deserializes the ServedEntitySpec from a dictionary."""
-        return cls(entity_name=d.get('entity_name', None),
-                   entity_version=d.get('entity_version', None),
-                   external_model=_from_dict(d, 'external_model', ExternalModel),
-                   foundation_model=_from_dict(d, 'foundation_model', FoundationModel),
-                   name=d.get('name', None))
+        return cls(
+            entity_name=d.get("entity_name", None),
+            entity_version=d.get("entity_version", None),
+            external_model=_from_dict(d, "external_model", ExternalModel),
+            foundation_model=_from_dict(d, "foundation_model", FoundationModel),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -2575,66 +3036,84 @@ class ServedModelInput:
     def as_dict(self) -> dict:
         """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.workload_size is not None: body['workload_size'] = self.workload_size.value
-        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size.value
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedModelInput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
         if self.max_provisioned_throughput is not None:
-            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+            body["max_provisioned_throughput"] = self.max_provisioned_throughput
         if self.min_provisioned_throughput is not None:
-            body['min_provisioned_throughput'] = self.min_provisioned_throughput
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+            body["min_provisioned_throughput"] = self.min_provisioned_throughput
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
         """Deserializes the ServedModelInput from a dictionary."""
-        return cls(environment_vars=d.get('environment_vars', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   max_provisioned_throughput=d.get('max_provisioned_throughput', None),
-                   min_provisioned_throughput=d.get('min_provisioned_throughput', None),
-                   model_name=d.get('model_name', None),
-                   model_version=d.get('model_version', None),
-                   name=d.get('name', None),
-                   scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
-                   workload_size=_enum(d, 'workload_size', ServedModelInputWorkloadSize),
-                   workload_type=_enum(d, 'workload_type', ServedModelInputWorkloadType))
+        return cls(
+            environment_vars=d.get("environment_vars", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            max_provisioned_throughput=d.get("max_provisioned_throughput", None),
+            min_provisioned_throughput=d.get("min_provisioned_throughput", None),
+            model_name=d.get("model_name", None),
+            model_version=d.get("model_version", None),
+            name=d.get("name", None),
+            scale_to_zero_enabled=d.get("scale_to_zero_enabled", None),
+            workload_size=_enum(d, "workload_size", ServedModelInputWorkloadSize),
+            workload_type=_enum(d, "workload_type", ServedModelInputWorkloadType),
+        )
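+
+    # Illustrative sketch (comment only, not generated API code): as_dict and
+    # from_dict above are symmetric, so a config built in Python round-trips
+    # through the JSON request body. Names below are hypothetical.
+    #
+    #   served = ServedModelInput(
+    #       model_name="main.default.my_model",
+    #       model_version="1",
+    #       scale_to_zero_enabled=True,
+    #       workload_size=ServedModelInputWorkloadSize.SMALL,
+    #   )
+    #   body = served.as_dict()                      # workload_size becomes "Small"
+    #   restored = ServedModelInput.from_dict(body)  # enum rebuilt from its value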
 
 
 class ServedModelInputWorkloadSize(Enum):
 
-    LARGE = 'Large'
-    MEDIUM = 'Medium'
-    SMALL = 'Small'
+    LARGE = "Large"
+    MEDIUM = "Medium"
+    SMALL = "Small"
 
 
 class ServedModelInputWorkloadType(Enum):
 
-    CPU = 'CPU'
-    GPU_LARGE = 'GPU_LARGE'
-    GPU_MEDIUM = 'GPU_MEDIUM'
-    GPU_SMALL = 'GPU_SMALL'
-    MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM'
+    CPU = "CPU"
+    GPU_LARGE = "GPU_LARGE"
+    GPU_MEDIUM = "GPU_MEDIUM"
+    GPU_SMALL = "GPU_SMALL"
+    MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM"
 
 
 @dataclass
@@ -2686,49 +3165,73 @@ class ServedModelOutput:
     def as_dict(self) -> dict:
         """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.state: body['state'] = self.state.as_dict()
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.environment_vars: body['environment_vars'] = self.environment_vars
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
-        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
-        if self.state: body['state'] = self.state
-        if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.environment_vars:
+            body["environment_vars"] = self.environment_vars
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
+        if self.scale_to_zero_enabled is not None:
+            body["scale_to_zero_enabled"] = self.scale_to_zero_enabled
+        if self.state:
+            body["state"] = self.state
+        if self.workload_size is not None:
+            body["workload_size"] = self.workload_size
+        if self.workload_type is not None:
+            body["workload_type"] = self.workload_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelOutput:
         """Deserializes the ServedModelOutput from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   creator=d.get('creator', None),
-                   environment_vars=d.get('environment_vars', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   model_name=d.get('model_name', None),
-                   model_version=d.get('model_version', None),
-                   name=d.get('name', None),
-                   scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
-                   state=_from_dict(d, 'state', ServedModelState),
-                   workload_size=d.get('workload_size', None),
-                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            creator=d.get("creator", None),
+            environment_vars=d.get("environment_vars", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            model_name=d.get("model_name", None),
+            model_version=d.get("model_version", None),
+            name=d.get("name", None),
+            scale_to_zero_enabled=d.get("scale_to_zero_enabled", None),
+            state=_from_dict(d, "state", ServedModelState),
+            workload_size=d.get("workload_size", None),
+            workload_type=_enum(d, "workload_type", ServingModelWorkloadType),
+        )
 
 
 @dataclass
@@ -2744,25 +3247,33 @@ class ServedModelSpec:
     def as_dict(self) -> dict:
         """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_name is not None: body['model_name'] = self.model_name
-        if self.model_version is not None: body['model_version'] = self.model_version
-        if self.name is not None: body['name'] = self.name
+        if self.model_name is not None:
+            body["model_name"] = self.model_name
+        if self.model_version is not None:
+            body["model_version"] = self.model_version
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec:
         """Deserializes the ServedModelSpec from a dictionary."""
-        return cls(model_name=d.get('model_name', None),
-                   model_version=d.get('model_version', None),
-                   name=d.get('name', None))
+        return cls(
+            model_name=d.get("model_name", None),
+            model_version=d.get("model_version", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -2774,33 +3285,37 @@ class ServedModelState:
     def as_dict(self) -> dict:
         """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.deployment is not None: body['deployment'] = self.deployment.value
+        if self.deployment is not None:
+            body["deployment"] = self.deployment.value
         if self.deployment_state_message is not None:
-            body['deployment_state_message'] = self.deployment_state_message
+            body["deployment_state_message"] = self.deployment_state_message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServedModelState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.deployment is not None: body['deployment'] = self.deployment
+        if self.deployment is not None:
+            body["deployment"] = self.deployment
         if self.deployment_state_message is not None:
-            body['deployment_state_message'] = self.deployment_state_message
+            body["deployment_state_message"] = self.deployment_state_message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelState:
         """Deserializes the ServedModelState from a dictionary."""
-        return cls(deployment=_enum(d, 'deployment', ServedModelStateDeployment),
-                   deployment_state_message=d.get('deployment_state_message', None))
+        return cls(
+            deployment=_enum(d, "deployment", ServedModelStateDeployment),
+            deployment_state_message=d.get("deployment_state_message", None),
+        )
 
 
 class ServedModelStateDeployment(Enum):
 
-    ABORTED = 'DEPLOYMENT_ABORTED'
-    CREATING = 'DEPLOYMENT_CREATING'
-    FAILED = 'DEPLOYMENT_FAILED'
-    READY = 'DEPLOYMENT_READY'
-    RECOVERING = 'DEPLOYMENT_RECOVERING'
+    ABORTED = "DEPLOYMENT_ABORTED"
+    CREATING = "DEPLOYMENT_CREATING"
+    FAILED = "DEPLOYMENT_FAILED"
+    READY = "DEPLOYMENT_READY"
+    RECOVERING = "DEPLOYMENT_RECOVERING"
 
 
 @dataclass
@@ -2811,19 +3326,21 @@ class ServerLogsResponse:
     def as_dict(self) -> dict:
         """Serializes the ServerLogsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.logs is not None: body['logs'] = self.logs
+        if self.logs is not None:
+            body["logs"] = self.logs
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServerLogsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.logs is not None: body['logs'] = self.logs
+        if self.logs is not None:
+            body["logs"] = self.logs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
         """Deserializes the ServerLogsResponse from a dictionary."""
-        return cls(logs=d.get('logs', None))
+        return cls(logs=d.get("logs", None))
 
 
 @dataclass
@@ -2862,48 +3379,68 @@ class ServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
-        if self.config: body['config'] = self.config.as_dict()
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.id is not None: body['id'] = self.id
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway.as_dict()
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.state: body['state'] = self.state.as_dict()
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.task is not None: body['task'] = self.task
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
-        if self.config: body['config'] = self.config
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.id is not None: body['id'] = self.id
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway
+        if self.config:
+            body["config"] = self.config
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.state: body['state'] = self.state
-        if self.tags: body['tags'] = self.tags
-        if self.task is not None: body['task'] = self.task
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.state:
+            body["state"] = self.state
+        if self.tags:
+            body["tags"] = self.tags
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint:
         """Deserializes the ServingEndpoint from a dictionary."""
-        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
-                   config=_from_dict(d, 'config', EndpointCoreConfigSummary),
-                   creation_timestamp=d.get('creation_timestamp', None),
-                   creator=d.get('creator', None),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   name=d.get('name', None),
-                   state=_from_dict(d, 'state', EndpointState),
-                   tags=_repeated_dict(d, 'tags', EndpointTag),
-                   task=d.get('task', None))
+        return cls(
+            ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig),
+            config=_from_dict(d, "config", EndpointCoreConfigSummary),
+            creation_timestamp=d.get("creation_timestamp", None),
+            creator=d.get("creator", None),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            name=d.get("name", None),
+            state=_from_dict(d, "state", EndpointState),
+            tags=_repeated_dict(d, "tags", EndpointTag),
+            task=d.get("task", None),
+        )
 
 
 @dataclass
@@ -2923,30 +3460,38 @@ class ServingEndpointAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlRequest:
         """Deserializes the ServingEndpointAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -2969,33 +3514,43 @@ class ServingEndpointAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
         """Deserializes the ServingEndpointAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ServingEndpointPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ServingEndpointPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -3050,70 +3605,100 @@ class ServingEndpointDetailed:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
-        if self.config: body['config'] = self.config.as_dict()
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.data_plane_info: body['data_plane_info'] = self.data_plane_info.as_dict()
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
-        if self.id is not None: body['id'] = self.id
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway.as_dict()
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.data_plane_info:
+            body["data_plane_info"] = self.data_plane_info.as_dict()
+        if self.endpoint_url is not None:
+            body["endpoint_url"] = self.endpoint_url
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.pending_config: body['pending_config'] = self.pending_config.as_dict()
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
-        if self.state: body['state'] = self.state.as_dict()
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.task is not None: body['task'] = self.task
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pending_config:
+            body["pending_config"] = self.pending_config.as_dict()
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.route_optimized is not None:
+            body["route_optimized"] = self.route_optimized
+        if self.state:
+            body["state"] = self.state.as_dict()
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointDetailed into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
-        if self.config: body['config'] = self.config
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.data_plane_info: body['data_plane_info'] = self.data_plane_info
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
-        if self.id is not None: body['id'] = self.id
+        if self.ai_gateway:
+            body["ai_gateway"] = self.ai_gateway
+        if self.config:
+            body["config"] = self.config
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.data_plane_info:
+            body["data_plane_info"] = self.data_plane_info
+        if self.endpoint_url is not None:
+            body["endpoint_url"] = self.endpoint_url
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.name is not None: body['name'] = self.name
-        if self.pending_config: body['pending_config'] = self.pending_config
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
-        if self.state: body['state'] = self.state
-        if self.tags: body['tags'] = self.tags
-        if self.task is not None: body['task'] = self.task
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pending_config:
+            body["pending_config"] = self.pending_config
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.route_optimized is not None:
+            body["route_optimized"] = self.route_optimized
+        if self.state:
+            body["state"] = self.state
+        if self.tags:
+            body["tags"] = self.tags
+        if self.task is not None:
+            body["task"] = self.task
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
         """Deserializes the ServingEndpointDetailed from a dictionary."""
-        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
-                   config=_from_dict(d, 'config', EndpointCoreConfigOutput),
-                   creation_timestamp=d.get('creation_timestamp', None),
-                   creator=d.get('creator', None),
-                   data_plane_info=_from_dict(d, 'data_plane_info', ModelDataPlaneInfo),
-                   endpoint_url=d.get('endpoint_url', None),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   name=d.get('name', None),
-                   pending_config=_from_dict(d, 'pending_config', EndpointPendingConfig),
-                   permission_level=_enum(d, 'permission_level', ServingEndpointDetailedPermissionLevel),
-                   route_optimized=d.get('route_optimized', None),
-                   state=_from_dict(d, 'state', EndpointState),
-                   tags=_repeated_dict(d, 'tags', EndpointTag),
-                   task=d.get('task', None))
+        return cls(
+            ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig),
+            config=_from_dict(d, "config", EndpointCoreConfigOutput),
+            creation_timestamp=d.get("creation_timestamp", None),
+            creator=d.get("creator", None),
+            data_plane_info=_from_dict(d, "data_plane_info", ModelDataPlaneInfo),
+            endpoint_url=d.get("endpoint_url", None),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            name=d.get("name", None),
+            pending_config=_from_dict(d, "pending_config", EndpointPendingConfig),
+            permission_level=_enum(d, "permission_level", ServingEndpointDetailedPermissionLevel),
+            route_optimized=d.get("route_optimized", None),
+            state=_from_dict(d, "state", EndpointState),
+            tags=_repeated_dict(d, "tags", EndpointTag),
+            task=d.get("task", None),
+        )
 
 
 class ServingEndpointDetailedPermissionLevel(Enum):
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_QUERY = 'CAN_QUERY'
-    CAN_VIEW = 'CAN_VIEW'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_QUERY = "CAN_QUERY"
+    CAN_VIEW = "CAN_VIEW"
 
 
 @dataclass
@@ -3128,33 +3713,41 @@ class ServingEndpointPermission:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermission:
         """Deserializes the ServingEndpointPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel),
+        )
 
 
 class ServingEndpointPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_QUERY = 'CAN_QUERY'
-    CAN_VIEW = 'CAN_VIEW'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_QUERY = "CAN_QUERY"
+    CAN_VIEW = "CAN_VIEW"
 
 
 @dataclass
@@ -3169,26 +3762,32 @@ def as_dict(self) -> dict:
         """Serializes the ServingEndpointPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissions:
         """Deserializes the ServingEndpointPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ServingEndpointAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -3201,22 +3800,28 @@ class ServingEndpointPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsDescription:
         """Deserializes the ServingEndpointPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel),
+        )
 
 
 @dataclass
@@ -3230,32 +3835,36 @@ def as_dict(self) -> dict:
         """Serializes the ServingEndpointPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.serving_endpoint_id is not None:
+            body["serving_endpoint_id"] = self.serving_endpoint_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.serving_endpoint_id is not None:
+            body["serving_endpoint_id"] = self.serving_endpoint_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
         """Deserializes the ServingEndpointPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      ServingEndpointAccessControlRequest),
-                   serving_endpoint_id=d.get('serving_endpoint_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlRequest),
+            serving_endpoint_id=d.get("serving_endpoint_id", None),
+        )
 
 
 class ServingModelWorkloadType(Enum):
 
-    CPU = 'CPU'
-    GPU_LARGE = 'GPU_LARGE'
-    GPU_MEDIUM = 'GPU_MEDIUM'
-    GPU_SMALL = 'GPU_SMALL'
-    MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM'
+    CPU = "CPU"
+    GPU_LARGE = "GPU_LARGE"
+    GPU_MEDIUM = "GPU_MEDIUM"
+    GPU_SMALL = "GPU_SMALL"
+    MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM"
 
 
 @dataclass
@@ -3266,19 +3875,21 @@ class TrafficConfig:
     def as_dict(self) -> dict:
         """Serializes the TrafficConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.routes: body['routes'] = [v.as_dict() for v in self.routes]
+        if self.routes:
+            body["routes"] = [v.as_dict() for v in self.routes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.routes: body['routes'] = self.routes
+        if self.routes:
+            body["routes"] = self.routes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TrafficConfig:
         """Deserializes the TrafficConfig from a dictionary."""
-        return cls(routes=_repeated_dict(d, 'routes', Route))
+        return cls(routes=_repeated_dict(d, "routes", Route))
 
 
 @dataclass
@@ -3301,36 +3912,48 @@ class V1ResponseChoiceElement:
     def as_dict(self) -> dict:
         """Serializes the V1ResponseChoiceElement into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.finish_reason is not None: body['finishReason'] = self.finish_reason
-        if self.index is not None: body['index'] = self.index
-        if self.logprobs is not None: body['logprobs'] = self.logprobs
-        if self.message: body['message'] = self.message.as_dict()
-        if self.text is not None: body['text'] = self.text
+        if self.finish_reason is not None:
+            body["finishReason"] = self.finish_reason
+        if self.index is not None:
+            body["index"] = self.index
+        if self.logprobs is not None:
+            body["logprobs"] = self.logprobs
+        if self.message:
+            body["message"] = self.message.as_dict()
+        if self.text is not None:
+            body["text"] = self.text
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.finish_reason is not None: body['finishReason'] = self.finish_reason
-        if self.index is not None: body['index'] = self.index
-        if self.logprobs is not None: body['logprobs'] = self.logprobs
-        if self.message: body['message'] = self.message
-        if self.text is not None: body['text'] = self.text
+        if self.finish_reason is not None:
+            body["finishReason"] = self.finish_reason
+        if self.index is not None:
+            body["index"] = self.index
+        if self.logprobs is not None:
+            body["logprobs"] = self.logprobs
+        if self.message:
+            body["message"] = self.message
+        if self.text is not None:
+            body["text"] = self.text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement:
         """Deserializes the V1ResponseChoiceElement from a dictionary."""
-        return cls(finish_reason=d.get('finishReason', None),
-                   index=d.get('index', None),
-                   logprobs=d.get('logprobs', None),
-                   message=_from_dict(d, 'message', ChatMessage),
-                   text=d.get('text', None))
+        return cls(
+            finish_reason=d.get("finishReason", None),
+            index=d.get("index", None),
+            logprobs=d.get("logprobs", None),
+            message=_from_dict(d, "message", ChatMessage),
+            text=d.get("text", None),
+        )
 
 
 class ServingEndpointsAPI:
     """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
-    
+
     You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog.
     Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means
     the endpoints and associated compute resources are fully managed by Databricks and will not appear in your
@@ -3344,67 +3967,76 @@ def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_serving_endpoint_not_updating(
-            self,
-            name: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed:
+        self,
+        name: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[ServingEndpointDetailed], None]] = None,
+    ) -> ServingEndpointDetailed:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (EndpointStateConfigUpdate.NOT_UPDATING, )
-        failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED,
-                          )
-        status_message = 'polling...'
+        target_states = (EndpointStateConfigUpdate.NOT_UPDATING,)
+        failure_states = (
+            EndpointStateConfigUpdate.UPDATE_FAILED,
+            EndpointStateConfigUpdate.UPDATE_CANCELED,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(name=name)
             status = poll.state.config_update
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach NOT_UPDATING, got {status}: {status_message}'
+                msg = f"failed to reach NOT_UPDATING, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse:
         """Get build logs for a served model.
-        
+
         Retrieves the build logs associated with the provided served model.
-        
+
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         :param served_model_name: str
           The name of the served model that build logs will be retrieved for. This field is required.
-        
+
         :returns: :class:`BuildLogsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs",
+            headers=headers,
+        )
         return BuildLogsResponse.from_dict(res)
 
-    def create(self,
-               name: str,
-               *,
-               ai_gateway: Optional[AiGatewayConfig] = None,
-               config: Optional[EndpointCoreConfigInput] = None,
-               rate_limits: Optional[List[RateLimit]] = None,
-               route_optimized: Optional[bool] = None,
-               tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
+    def create(
+        self,
+        name: str,
+        *,
+        ai_gateway: Optional[AiGatewayConfig] = None,
+        config: Optional[EndpointCoreConfigInput] = None,
+        rate_limits: Optional[List[RateLimit]] = None,
+        route_optimized: Optional[bool] = None,
+        tags: Optional[List[EndpointTag]] = None,
+    ) -> Wait[ServingEndpointDetailed]:
         """Create a new serving endpoint.
-        
+
         :param name: str
           The name of the serving endpoint. This field is required and must be unique across a Databricks
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
@@ -3420,24 +4052,35 @@ def create(self,
           Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
           Tags to be attached to the serving endpoint and automatically propagated to billing logs.
-        
+
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.
           See :method:wait_get_serving_endpoint_not_updating for more details.
         """
         body = {}
-        if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict()
-        if config is not None: body['config'] = config.as_dict()
-        if name is not None: body['name'] = name
-        if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
-        if route_optimized is not None: body['route_optimized'] = route_optimized
-        if tags is not None: body['tags'] = [v.as_dict() for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/serving-endpoints', body=body, headers=headers)
-        return Wait(self.wait_get_serving_endpoint_not_updating,
-                    response=ServingEndpointDetailed.from_dict(op_response),
-                    name=op_response['name'])
+        if ai_gateway is not None:
+            body["ai_gateway"] = ai_gateway.as_dict()
+        if config is not None:
+            body["config"] = config.as_dict()
+        if name is not None:
+            body["name"] = name
+        if rate_limits is not None:
+            body["rate_limits"] = [v.as_dict() for v in rate_limits]
+        if route_optimized is not None:
+            body["route_optimized"] = route_optimized
+        if tags is not None:
+            body["tags"] = [v.as_dict() for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.0/serving-endpoints", body=body, headers=headers)
+        return Wait(
+            self.wait_get_serving_endpoint_not_updating,
+            response=ServingEndpointDetailed.from_dict(op_response),
+            name=op_response["name"],
+        )
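A minimal usage sketch for create(): build an endpoint config and block on the returned waiter. It assumes the API is reachable as w.serving_endpoints on WorkspaceClient; the endpoint name, the Unity Catalog model name, and the ServedEntityInput field values are illustrative placeholders, not values from this patch.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedEntityInput

w = WorkspaceClient()

# Create the endpoint, then wait (default 20 minutes) until config_update
# reaches NOT_UPDATING via wait_get_serving_endpoint_not_updating.
endpoint = w.serving_endpoints.create(
    name="my-endpoint",  # assumed endpoint name
    config=EndpointCoreConfigInput(
        served_entities=[
            ServedEntityInput(
                entity_name="main.default.my_model",  # assumed UC model
                entity_version="1",
                workload_size="Small",
                scale_to_zero_enabled=True,
            )
        ],
    ),
).result()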
 
     def create_and_wait(
         self,
@@ -3448,123 +4091,154 @@ def create_and_wait(
         rate_limits: Optional[List[RateLimit]] = None,
         route_optimized: Optional[bool] = None,
         tags: Optional[List[EndpointTag]] = None,
-        timeout=timedelta(minutes=20)) -> ServingEndpointDetailed:
-        return self.create(ai_gateway=ai_gateway,
-                           config=config,
-                           name=name,
-                           rate_limits=rate_limits,
-                           route_optimized=route_optimized,
-                           tags=tags).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> ServingEndpointDetailed:
+        return self.create(
+            ai_gateway=ai_gateway,
+            config=config,
+            name=name,
+            rate_limits=rate_limits,
+            route_optimized=route_optimized,
+            tags=tags,
+        ).result(timeout=timeout)
 
     def delete(self, name: str):
         """Delete a serving endpoint.
-        
+
         :param name: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/serving-endpoints/{name}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/serving-endpoints/{name}", headers=headers)
 
     def export_metrics(self, name: str) -> ExportMetricsResponse:
         """Get metrics of a serving endpoint.
-        
+
         Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
         OpenMetrics exposition format.
-        
+
         :param name: str
           The name of the serving endpoint to retrieve metrics for. This field is required.
-        
+
         :returns: :class:`ExportMetricsResponse`
         """
 
-        headers = {'Accept': 'text/plain', }
+        headers = {
+            "Accept": "text/plain",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/metrics', headers=headers, raw=True)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/serving-endpoints/{name}/metrics",
+            headers=headers,
+            raw=True,
+        )
         return ExportMetricsResponse.from_dict(res)
 
     def get(self, name: str) -> ServingEndpointDetailed:
         """Get a single serving endpoint.
-        
+
         Retrieves the details for a single serving endpoint.
-        
+
         :param name: str
           The name of the serving endpoint. This field is required.
-        
+
         :returns: :class:`ServingEndpointDetailed`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}", headers=headers)
         return ServingEndpointDetailed.from_dict(res)
 
     def get_open_api(self, name: str) -> GetOpenApiResponse:
         """Get the schema for a serving endpoint.
-        
+
         Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
         the supported paths, input and output formats, and datatypes.
-        
+
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
-        
+
         :returns: :class:`GetOpenApiResponse`
         """
 
-        headers = {'Accept': 'text/plain', }
+        headers = {
+            "Accept": "text/plain",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers, raw=True)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/serving-endpoints/{name}/openapi",
+            headers=headers,
+            raw=True,
+        )
         return GetOpenApiResponse.from_dict(res)
 
     def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse:
         """Get serving endpoint permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
-        
+
         :returns: :class:`GetServingEndpointPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels",
+            headers=headers,
+        )
         return GetServingEndpointPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermissions:
         """Get serving endpoint permissions.
-        
+
         Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
         object.
-        
+
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
-        
+
         :returns: :class:`ServingEndpointPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}",
+            headers=headers,
+        )
         return ServingEndpointPermissions.from_dict(res)
 
-    def http_request(self,
-                     connection_name: str,
-                     method: ExternalFunctionRequestHttpMethod,
-                     path: str,
-                     *,
-                     headers: Optional[str] = None,
-                     json: Optional[str] = None,
-                     params: Optional[str] = None) -> HttpRequestResponse:
+    def http_request(
+        self,
+        connection_name: str,
+        method: ExternalFunctionRequestHttpMethod,
+        path: str,
+        *,
+        headers: Optional[str] = None,
+        json: Optional[str] = None,
+        params: Optional[str] = None,
+    ) -> HttpRequestResponse:
         """Make external services call using the credentials stored in UC Connection.
-        
+
         :param connection_name: str
           The connection name to use. This is required to identify the external connection.
         :param method: :class:`ExternalFunctionRequestHttpMethod`
@@ -3578,115 +4252,155 @@ def http_request(self,
           The JSON payload to send in the request body.
         :param params: str (optional)
           Query parameters for the request.
-        
+
         :returns: :class:`HttpRequestResponse`
         """
         body = {}
-        if connection_name is not None: body['connection_name'] = connection_name
-        if headers is not None: body['headers'] = headers
-        if json is not None: body['json'] = json
-        if method is not None: body['method'] = method.value
-        if params is not None: body['params'] = params
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'text/plain', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers, raw=True)
+        if connection_name is not None:
+            body["connection_name"] = connection_name
+        if headers is not None:
+            body["headers"] = headers
+        if json is not None:
+            body["json"] = json
+        if method is not None:
+            body["method"] = method.value
+        if params is not None:
+            body["params"] = params
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "text/plain",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/external-function",
+            body=body,
+            headers=headers,
+            raw=True,
+        )
         return HttpRequestResponse.from_dict(res)
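A sketch of http_request(): proxy a call through the credentials stored in a Unity Catalog connection. The connection name and external path are placeholders, and ExternalFunctionRequestHttpMethod.GET is assumed to be one of the enum's members.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

# The call is routed through /api/2.0/external-function using the
# credentials stored in the named UC connection.
resp = w.serving_endpoints.http_request(
    connection_name="my_connection",  # assumed UC connection
    method=ExternalFunctionRequestHttpMethod.GET,
    path="/api/v1/status",  # assumed path on the external service
)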
 
     def list(self) -> Iterator[ServingEndpoint]:
         """Get all serving endpoints.
-        
+
         :returns: Iterator over :class:`ServingEndpoint`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/serving-endpoints', headers=headers)
+        json = self._api.do("GET", "/api/2.0/serving-endpoints", headers=headers)
         parsed = ListEndpointsResponse.from_dict(json).endpoints
         return parsed if parsed is not None else []
 
     def logs(self, name: str, served_model_name: str) -> ServerLogsResponse:
         """Get the latest logs for a served model.
-        
+
         Retrieves the service logs associated with the provided served model.
-        
+
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         :param served_model_name: str
           The name of the served model that logs will be retrieved for. This field is required.
-        
+
         :returns: :class:`ServerLogsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs",
+            headers=headers,
+        )
         return ServerLogsResponse.from_dict(res)
 
-    def patch(self,
-              name: str,
-              *,
-              add_tags: Optional[List[EndpointTag]] = None,
-              delete_tags: Optional[List[str]] = None) -> EndpointTags:
+    def patch(
+        self,
+        name: str,
+        *,
+        add_tags: Optional[List[EndpointTag]] = None,
+        delete_tags: Optional[List[str]] = None,
+    ) -> EndpointTags:
         """Update tags of a serving endpoint.
-        
+
         Used to batch add and delete tags from a serving endpoint with a single API call.
-        
+
         :param name: str
           The name of the serving endpoint whose tags to patch. This field is required.
         :param add_tags: List[:class:`EndpointTag`] (optional)
           List of endpoint tags to add
         :param delete_tags: List[str] (optional)
           List of tag keys to delete
-        
+
         :returns: :class:`EndpointTags`
         """
         body = {}
-        if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags]
-        if delete_tags is not None: body['delete_tags'] = [v for v in delete_tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/serving-endpoints/{name}/tags', body=body, headers=headers)
+        if add_tags is not None:
+            body["add_tags"] = [v.as_dict() for v in add_tags]
+        if delete_tags is not None:
+            body["delete_tags"] = [v for v in delete_tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/serving-endpoints/{name}/tags",
+            body=body,
+            headers=headers,
+        )
         return EndpointTags.from_dict(res)
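A sketch of patch(): add and delete endpoint tags in a single call. The endpoint name, the tag key/value, and the EndpointTag constructor fields are assumptions.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EndpointTag

w = WorkspaceClient()

# Batch-add one tag and remove another from the same endpoint.
tags = w.serving_endpoints.patch(
    name="my-endpoint",  # assumed endpoint name
    add_tags=[EndpointTag(key="team", value="ml")],  # assumed tag fields
    delete_tags=["deprecated"],
)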
 
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
         """Update rate limits of a serving endpoint.
-        
+
         Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
         currently supported. For external models, use AI Gateway to manage rate limits.
-        
+
         :param name: str
           The name of the serving endpoint whose rate limits are being updated. This field is required.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           The list of endpoint rate limits.
-        
+
         :returns: :class:`PutResponse`
         """
         body = {}
-        if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/serving-endpoints/{name}/rate-limits',
-                           body=body,
-                           headers=headers)
+        if rate_limits is not None:
+            body["rate_limits"] = [v.as_dict() for v in rate_limits]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/serving-endpoints/{name}/rate-limits",
+            body=body,
+            headers=headers,
+        )
         return PutResponse.from_dict(res)
 
     def put_ai_gateway(
-            self,
-            name: str,
-            *,
-            guardrails: Optional[AiGatewayGuardrails] = None,
-            inference_table_config: Optional[AiGatewayInferenceTableConfig] = None,
-            rate_limits: Optional[List[AiGatewayRateLimit]] = None,
-            usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse:
+        self,
+        name: str,
+        *,
+        guardrails: Optional[AiGatewayGuardrails] = None,
+        inference_table_config: Optional[AiGatewayInferenceTableConfig] = None,
+        rate_limits: Optional[List[AiGatewayRateLimit]] = None,
+        usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None,
+    ) -> PutAiGatewayResponse:
         """Update AI Gateway of a serving endpoint.
-        
+
         Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
         throughput endpoints are currently supported.
-        
+
         :param name: str
           The name of the serving endpoint whose AI Gateway is being updated. This field is required.
         :param guardrails: :class:`AiGatewayGuardrails` (optional)
@@ -3699,38 +4413,51 @@ def put_ai_gateway(
         :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
           Configuration to enable usage tracking using system tables. These tables allow you to monitor
           operational usage on endpoints and their associated costs.
-        
+
         :returns: :class:`PutAiGatewayResponse`
         """
         body = {}
-        if guardrails is not None: body['guardrails'] = guardrails.as_dict()
+        if guardrails is not None:
+            body["guardrails"] = guardrails.as_dict()
         if inference_table_config is not None:
-            body['inference_table_config'] = inference_table_config.as_dict()
-        if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
-        if usage_tracking_config is not None: body['usage_tracking_config'] = usage_tracking_config.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT', f'/api/2.0/serving-endpoints/{name}/ai-gateway', body=body, headers=headers)
+            body["inference_table_config"] = inference_table_config.as_dict()
+        if rate_limits is not None:
+            body["rate_limits"] = [v.as_dict() for v in rate_limits]
+        if usage_tracking_config is not None:
+            body["usage_tracking_config"] = usage_tracking_config.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/serving-endpoints/{name}/ai-gateway",
+            body=body,
+            headers=headers,
+        )
         return PutAiGatewayResponse.from_dict(res)
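A sketch of put_ai_gateway(): enable usage tracking on an external-model endpoint. The endpoint name and the AiGatewayUsageTrackingConfig field are assumptions.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import AiGatewayUsageTrackingConfig

w = WorkspaceClient()

# Usage is then recorded in system tables for operational and cost monitoring.
w.serving_endpoints.put_ai_gateway(
    name="my-external-endpoint",  # assumed endpoint name
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),  # assumed field
)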
 
-    def query(self,
-              name: str,
-              *,
-              dataframe_records: Optional[List[Any]] = None,
-              dataframe_split: Optional[DataframeSplitInput] = None,
-              extra_params: Optional[Dict[str, str]] = None,
-              input: Optional[Any] = None,
-              inputs: Optional[Any] = None,
-              instances: Optional[List[Any]] = None,
-              max_tokens: Optional[int] = None,
-              messages: Optional[List[ChatMessage]] = None,
-              n: Optional[int] = None,
-              prompt: Optional[Any] = None,
-              stop: Optional[List[str]] = None,
-              stream: Optional[bool] = None,
-              temperature: Optional[float] = None) -> QueryEndpointResponse:
+    def query(
+        self,
+        name: str,
+        *,
+        dataframe_records: Optional[List[Any]] = None,
+        dataframe_split: Optional[DataframeSplitInput] = None,
+        extra_params: Optional[Dict[str, str]] = None,
+        input: Optional[Any] = None,
+        inputs: Optional[Any] = None,
+        instances: Optional[List[Any]] = None,
+        max_tokens: Optional[int] = None,
+        messages: Optional[List[ChatMessage]] = None,
+        n: Optional[int] = None,
+        prompt: Optional[Any] = None,
+        stop: Optional[List[str]] = None,
+        stream: Optional[bool] = None,
+        temperature: Optional[float] = None,
+    ) -> QueryEndpointResponse:
         """Query a serving endpoint.
-        
+
         :param name: str
           The name of the serving endpoint. This field is required.
         :param dataframe_records: List[Any] (optional)
@@ -3774,73 +4501,100 @@ def query(self,
           The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
           endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
           other chat/completions query fields.
-        
+
         :returns: :class:`QueryEndpointResponse`
         """
         body = {}
-        if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records]
-        if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict()
-        if extra_params is not None: body['extra_params'] = extra_params
-        if input is not None: body['input'] = input
-        if inputs is not None: body['inputs'] = inputs
-        if instances is not None: body['instances'] = [v for v in instances]
-        if max_tokens is not None: body['max_tokens'] = max_tokens
-        if messages is not None: body['messages'] = [v.as_dict() for v in messages]
-        if n is not None: body['n'] = n
-        if prompt is not None: body['prompt'] = prompt
-        if stop is not None: body['stop'] = [v for v in stop]
-        if stream is not None: body['stream'] = stream
-        if temperature is not None: body['temperature'] = temperature
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-        response_headers = ['served-model-name', ]
-        res = self._api.do('POST',
-                           f'/serving-endpoints/{name}/invocations',
-                           body=body,
-                           headers=headers,
-                           response_headers=response_headers)
+        if dataframe_records is not None:
+            body["dataframe_records"] = [v for v in dataframe_records]
+        if dataframe_split is not None:
+            body["dataframe_split"] = dataframe_split.as_dict()
+        if extra_params is not None:
+            body["extra_params"] = extra_params
+        if input is not None:
+            body["input"] = input
+        if inputs is not None:
+            body["inputs"] = inputs
+        if instances is not None:
+            body["instances"] = [v for v in instances]
+        if max_tokens is not None:
+            body["max_tokens"] = max_tokens
+        if messages is not None:
+            body["messages"] = [v.as_dict() for v in messages]
+        if n is not None:
+            body["n"] = n
+        if prompt is not None:
+            body["prompt"] = prompt
+        if stop is not None:
+            body["stop"] = [v for v in stop]
+        if stream is not None:
+            body["stream"] = stream
+        if temperature is not None:
+            body["temperature"] = temperature
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+        response_headers = [
+            "served-model-name",
+        ]
+        res = self._api.do(
+            "POST",
+            f"/serving-endpoints/{name}/invocations",
+            body=body,
+            headers=headers,
+            response_headers=response_headers,
+        )
         return QueryEndpointResponse.from_dict(res)
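A chat-style sketch of query() against a foundation model endpoint. The endpoint name is a placeholder, ChatMessageRole.USER is assumed to exist in this module, and response.choices is assumed to hold V1ResponseChoiceElement values (whose message field appears earlier in this file).

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()

# Sends a chat/completions-style request to /serving-endpoints/{name}/invocations.
response = w.serving_endpoints.query(
    name="databricks-meta-llama-3-1-70b-instruct",  # assumed endpoint name
    messages=[ChatMessage(role=ChatMessageRole.USER, content="Hello!")],
    max_tokens=64,
)
print(response.choices[0].message.content)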
 
     def set_permissions(
         self,
         serving_endpoint_id: str,
         *,
-        access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None
+        access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None,
     ) -> ServingEndpointPermissions:
         """Set serving endpoint permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
         :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ServingEndpointPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}",
+            body=body,
+            headers=headers,
+        )
         return ServingEndpointPermissions.from_dict(res)
 
-    def update_config(self,
-                      name: str,
-                      *,
-                      auto_capture_config: Optional[AutoCaptureConfigInput] = None,
-                      served_entities: Optional[List[ServedEntityInput]] = None,
-                      served_models: Optional[List[ServedModelInput]] = None,
-                      traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]:
+    def update_config(
+        self,
+        name: str,
+        *,
+        auto_capture_config: Optional[AutoCaptureConfigInput] = None,
+        served_entities: Optional[List[ServedEntityInput]] = None,
+        served_models: Optional[List[ServedModelInput]] = None,
+        traffic_config: Optional[TrafficConfig] = None,
+    ) -> Wait[ServingEndpointDetailed]:
         """Update config of a serving endpoint.
-        
+
         Updates any combination of the serving endpoint's served entities, the compute configuration of those
         served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
         cannot be updated until the current update completes or fails.
-        
+
         :param name: str
           The name of the serving endpoint to update. This field is required.
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
@@ -3855,25 +4609,36 @@ def update_config(self,
           config.
         :param traffic_config: :class:`TrafficConfig` (optional)
           The traffic configuration associated with the serving endpoint config.
-        
+
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.
           See :method:wait_get_serving_endpoint_not_updating for more details.
         """
         body = {}
-        if auto_capture_config is not None: body['auto_capture_config'] = auto_capture_config.as_dict()
-        if served_entities is not None: body['served_entities'] = [v.as_dict() for v in served_entities]
-        if served_models is not None: body['served_models'] = [v.as_dict() for v in served_models]
-        if traffic_config is not None: body['traffic_config'] = traffic_config.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('PUT',
-                                   f'/api/2.0/serving-endpoints/{name}/config',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_serving_endpoint_not_updating,
-                    response=ServingEndpointDetailed.from_dict(op_response),
-                    name=op_response['name'])
+        if auto_capture_config is not None:
+            body["auto_capture_config"] = auto_capture_config.as_dict()
+        if served_entities is not None:
+            body["served_entities"] = [v.as_dict() for v in served_entities]
+        if served_models is not None:
+            body["served_models"] = [v.as_dict() for v in served_models]
+        if traffic_config is not None:
+            body["traffic_config"] = traffic_config.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "PUT",
+            f"/api/2.0/serving-endpoints/{name}/config",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_serving_endpoint_not_updating,
+            response=ServingEndpointDetailed.from_dict(op_response),
+            name=op_response["name"],
+        )
 
     def update_config_and_wait(
         self,
@@ -3883,39 +4648,47 @@ def update_config_and_wait(
         served_entities: Optional[List[ServedEntityInput]] = None,
         served_models: Optional[List[ServedModelInput]] = None,
         traffic_config: Optional[TrafficConfig] = None,
-        timeout=timedelta(minutes=20)) -> ServingEndpointDetailed:
-        return self.update_config(auto_capture_config=auto_capture_config,
-                                  name=name,
-                                  served_entities=served_entities,
-                                  served_models=served_models,
-                                  traffic_config=traffic_config).result(timeout=timeout)
+        timeout=timedelta(minutes=20),
+    ) -> ServingEndpointDetailed:
+        return self.update_config(
+            auto_capture_config=auto_capture_config,
+            name=name,
+            served_entities=served_entities,
+            served_models=served_models,
+            traffic_config=traffic_config,
+        ).result(timeout=timeout)
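A sketch of update_config_and_wait(): swap the served entity and block until the config update settles. The names and sizing values are placeholders, and the ServedEntityInput fields are assumed to match the usual dataclass shape.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ServedEntityInput

w = WorkspaceClient()

# Raises OperationFailed if the update ends in UPDATE_FAILED or UPDATE_CANCELED.
endpoint = w.serving_endpoints.update_config_and_wait(
    name="my-endpoint",  # assumed endpoint name
    served_entities=[
        ServedEntityInput(
            entity_name="main.default.my_model",  # assumed UC model
            entity_version="2",
            workload_size="Small",
            scale_to_zero_enabled=True,
        )
    ],
)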
 
     def update_permissions(
         self,
         serving_endpoint_id: str,
         *,
-        access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None
+        access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None,
     ) -> ServingEndpointPermissions:
         """Update serving endpoint permissions.
-        
+
         Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their
         root object.
-        
+
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
         :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-        
+
         :returns: :class:`ServingEndpointPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}",
+            body=body,
+            headers=headers,
+        )
         return ServingEndpointPermissions.from_dict(res)
 
 
@@ -3927,26 +4700,29 @@ def __init__(self, api_client, control_plane):
         self._api = api_client
         self._control_plane = control_plane
         from ..data_plane import DataPlaneService
+
         self._data_plane_service = DataPlaneService()
 
-    def query(self,
-              name: str,
-              *,
-              dataframe_records: Optional[List[Any]] = None,
-              dataframe_split: Optional[DataframeSplitInput] = None,
-              extra_params: Optional[Dict[str, str]] = None,
-              input: Optional[Any] = None,
-              inputs: Optional[Any] = None,
-              instances: Optional[List[Any]] = None,
-              max_tokens: Optional[int] = None,
-              messages: Optional[List[ChatMessage]] = None,
-              n: Optional[int] = None,
-              prompt: Optional[Any] = None,
-              stop: Optional[List[str]] = None,
-              stream: Optional[bool] = None,
-              temperature: Optional[float] = None) -> QueryEndpointResponse:
+    def query(
+        self,
+        name: str,
+        *,
+        dataframe_records: Optional[List[Any]] = None,
+        dataframe_split: Optional[DataframeSplitInput] = None,
+        extra_params: Optional[Dict[str, str]] = None,
+        input: Optional[Any] = None,
+        inputs: Optional[Any] = None,
+        instances: Optional[List[Any]] = None,
+        max_tokens: Optional[int] = None,
+        messages: Optional[List[ChatMessage]] = None,
+        n: Optional[int] = None,
+        prompt: Optional[Any] = None,
+        stop: Optional[List[str]] = None,
+        stream: Optional[bool] = None,
+        temperature: Optional[float] = None,
+    ) -> QueryEndpointResponse:
         """Query a serving endpoint.
-        
+
         :param name: str
           The name of the serving endpoint. This field is required.
         :param dataframe_records: List[Any] (optional)
@@ -3990,33 +4766,51 @@ def query(self,
           The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
           endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
           other chat/completions query fields.
-        
+
         :returns: :class:`QueryEndpointResponse`
         """
         body = {}
-        if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records]
-        if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict()
-        if extra_params is not None: body['extra_params'] = extra_params
-        if input is not None: body['input'] = input
-        if inputs is not None: body['inputs'] = inputs
-        if instances is not None: body['instances'] = [v for v in instances]
-        if max_tokens is not None: body['max_tokens'] = max_tokens
-        if messages is not None: body['messages'] = [v.as_dict() for v in messages]
-        if n is not None: body['n'] = n
-        if prompt is not None: body['prompt'] = prompt
-        if stop is not None: body['stop'] = [v for v in stop]
-        if stream is not None: body['stream'] = stream
-        if temperature is not None: body['temperature'] = temperature
+        if dataframe_records is not None:
+            body["dataframe_records"] = [v for v in dataframe_records]
+        if dataframe_split is not None:
+            body["dataframe_split"] = dataframe_split.as_dict()
+        if extra_params is not None:
+            body["extra_params"] = extra_params
+        if input is not None:
+            body["input"] = input
+        if inputs is not None:
+            body["inputs"] = inputs
+        if instances is not None:
+            body["instances"] = [v for v in instances]
+        if max_tokens is not None:
+            body["max_tokens"] = max_tokens
+        if messages is not None:
+            body["messages"] = [v.as_dict() for v in messages]
+        if n is not None:
+            body["n"] = n
+        if prompt is not None:
+            body["prompt"] = prompt
+        if stop is not None:
+            body["stop"] = [v for v in stop]
+        if stream is not None:
+            body["stream"] = stream
+        if temperature is not None:
+            body["temperature"] = temperature
 
         def info_getter():
-            response = self._control_plane.get(name=name, )
+            response = self._control_plane.get(
+                name=name,
+            )
             if response.data_plane_info is None:
                 raise Exception("Resource does not support direct Data Plane access")
             return response.data_plane_info.query_info
 
-        get_params = [name, ]
-        data_plane_details = self._data_plane_service.get_data_plane_details('query', get_params, info_getter,
-                                                                             self._api.get_oauth_token)
+        get_params = [
+            name,
+        ]
+        data_plane_details = self._data_plane_service.get_data_plane_details(
+            "query", get_params, info_getter, self._api.get_oauth_token
+        )
         token = data_plane_details.token
 
         def auth(r: requests.PreparedRequest) -> requests.PreparedRequest:
@@ -4024,12 +4818,19 @@ def auth(r: requests.PreparedRequest) -> requests.PreparedRequest:
             r.headers["Authorization"] = authorization
             return r
 
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-        response_headers = ['served-model-name', ]
-        res = self._api.do('POST',
-                           url=data_plane_details.endpoint_url,
-                           body=body,
-                           headers=headers,
-                           response_headers=response_headers,
-                           auth=auth)
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+        response_headers = [
+            "served-model-name",
+        ]
+        res = self._api.do(
+            "POST",
+            url=data_plane_details.endpoint_url,
+            body=body,
+            headers=headers,
+            response_headers=response_headers,
+            auth=auth,
+        )
         return QueryEndpointResponse.from_dict(res)
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index 42c8c882d..8e8e81cd6 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -35,25 +35,33 @@ class AccountIpAccessEnable:
     def as_dict(self) -> dict:
         """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.acct_ip_acl_enable:
+            body["acct_ip_acl_enable"] = self.acct_ip_acl_enable.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.acct_ip_acl_enable:
+            body["acct_ip_acl_enable"] = self.acct_ip_acl_enable
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountIpAccessEnable:
         """Deserializes the AccountIpAccessEnable from a dictionary."""
-        return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            acct_ip_acl_enable=_from_dict(d, "acct_ip_acl_enable", BooleanMessage),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -63,27 +71,34 @@ class AibiDashboardEmbeddingAccessPolicy:
     def as_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value
+        if self.access_policy_type is not None:
+            body["access_policy_type"] = self.access_policy_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type
+        if self.access_policy_type is not None:
+            body["access_policy_type"] = self.access_policy_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicy:
         """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary."""
-        return cls(access_policy_type=_enum(d, 'access_policy_type',
-                                            AibiDashboardEmbeddingAccessPolicyAccessPolicyType))
+        return cls(
+            access_policy_type=_enum(
+                d,
+                "access_policy_type",
+                AibiDashboardEmbeddingAccessPolicyAccessPolicyType,
+            )
+        )
 
 
 class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum):
 
-    ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS'
-    ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS'
-    DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS'
+    ALLOW_ALL_DOMAINS = "ALLOW_ALL_DOMAINS"
+    ALLOW_APPROVED_DOMAINS = "ALLOW_APPROVED_DOMAINS"
+    DENY_ALL_DOMAINS = "DENY_ALL_DOMAINS"
 
 
 @dataclass
@@ -108,29 +123,36 @@ def as_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aibi_dashboard_embedding_access_policy:
-            body[
-                'aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict(
-                )
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.aibi_dashboard_embedding_access_policy:
-            body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicySetting:
         """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary."""
-        return cls(aibi_dashboard_embedding_access_policy=_from_dict(
-            d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            aibi_dashboard_embedding_access_policy=_from_dict(
+                d,
+                "aibi_dashboard_embedding_access_policy",
+                AibiDashboardEmbeddingAccessPolicy,
+            ),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -140,19 +162,21 @@ class AibiDashboardEmbeddingApprovedDomains:
     def as_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains]
+        if self.approved_domains:
+            body["approved_domains"] = [v for v in self.approved_domains]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.approved_domains: body['approved_domains'] = self.approved_domains
+        if self.approved_domains:
+            body["approved_domains"] = self.approved_domains
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomains:
         """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary."""
-        return cls(approved_domains=d.get('approved_domains', None))
+        return cls(approved_domains=d.get("approved_domains", None))
 
 
 @dataclass
@@ -177,29 +201,36 @@ def as_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aibi_dashboard_embedding_approved_domains:
-            body[
-                'aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains.as_dict(
-                )
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.aibi_dashboard_embedding_approved_domains:
-            body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomainsSetting:
         """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary."""
-        return cls(aibi_dashboard_embedding_approved_domains=_from_dict(
-            d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            aibi_dashboard_embedding_approved_domains=_from_dict(
+                d,
+                "aibi_dashboard_embedding_approved_domains",
+                AibiDashboardEmbeddingApprovedDomains,
+            ),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -224,27 +255,36 @@ def as_dict(self) -> dict:
         """Serializes the AutomaticClusterUpdateSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.automatic_cluster_update_workspace:
-            body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.automatic_cluster_update_workspace:
-            body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting:
         """Deserializes the AutomaticClusterUpdateSetting from a dictionary."""
-        return cls(automatic_cluster_update_workspace=_from_dict(d, 'automatic_cluster_update_workspace',
-                                                                 ClusterAutoRestartMessage),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            automatic_cluster_update_workspace=_from_dict(
+                d,
+                "automatic_cluster_update_workspace",
+                ClusterAutoRestartMessage,
+            ),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -254,19 +294,21 @@ class BooleanMessage:
     def as_dict(self) -> dict:
         """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BooleanMessage:
         """Deserializes the BooleanMessage from a dictionary."""
-        return cls(value=d.get('value', None))
+        return cls(value=d.get("value", None))
 
 
 @dataclass
@@ -289,35 +331,51 @@ class ClusterAutoRestartMessage:
     def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.enablement_details: body['enablement_details'] = self.enablement_details.as_dict()
-        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window.as_dict()
+        if self.can_toggle is not None:
+            body["can_toggle"] = self.can_toggle
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.enablement_details:
+            body["enablement_details"] = self.enablement_details.as_dict()
+        if self.maintenance_window:
+            body["maintenance_window"] = self.maintenance_window.as_dict()
         if self.restart_even_if_no_updates_available is not None:
-            body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
+            body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.enablement_details: body['enablement_details'] = self.enablement_details
-        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window
+        if self.can_toggle is not None:
+            body["can_toggle"] = self.can_toggle
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.enablement_details:
+            body["enablement_details"] = self.enablement_details
+        if self.maintenance_window:
+            body["maintenance_window"] = self.maintenance_window
         if self.restart_even_if_no_updates_available is not None:
-            body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
+            body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessage:
         """Deserializes the ClusterAutoRestartMessage from a dictionary."""
-        return cls(can_toggle=d.get('can_toggle', None),
-                   enabled=d.get('enabled', None),
-                   enablement_details=_from_dict(d, 'enablement_details',
-                                                 ClusterAutoRestartMessageEnablementDetails),
-                   maintenance_window=_from_dict(d, 'maintenance_window',
-                                                 ClusterAutoRestartMessageMaintenanceWindow),
-                   restart_even_if_no_updates_available=d.get('restart_even_if_no_updates_available', None))
+        return cls(
+            can_toggle=d.get("can_toggle", None),
+            enabled=d.get("enabled", None),
+            enablement_details=_from_dict(
+                d,
+                "enablement_details",
+                ClusterAutoRestartMessageEnablementDetails,
+            ),
+            maintenance_window=_from_dict(
+                d,
+                "maintenance_window",
+                ClusterAutoRestartMessageMaintenanceWindow,
+            ),
+            restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None),
+        )
 
 
 @dataclass
@@ -342,30 +400,32 @@ def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageEnablementDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.forced_for_compliance_mode is not None:
-            body['forced_for_compliance_mode'] = self.forced_for_compliance_mode
+            body["forced_for_compliance_mode"] = self.forced_for_compliance_mode
         if self.unavailable_for_disabled_entitlement is not None:
-            body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement
+            body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement
         if self.unavailable_for_non_enterprise_tier is not None:
-            body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
+            body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.forced_for_compliance_mode is not None:
-            body['forced_for_compliance_mode'] = self.forced_for_compliance_mode
+            body["forced_for_compliance_mode"] = self.forced_for_compliance_mode
         if self.unavailable_for_disabled_entitlement is not None:
-            body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement
+            body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement
         if self.unavailable_for_non_enterprise_tier is not None:
-            body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
+            body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageEnablementDetails:
         """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary."""
-        return cls(forced_for_compliance_mode=d.get('forced_for_compliance_mode', None),
-                   unavailable_for_disabled_entitlement=d.get('unavailable_for_disabled_entitlement', None),
-                   unavailable_for_non_enterprise_tier=d.get('unavailable_for_non_enterprise_tier', None))
+        return cls(
+            forced_for_compliance_mode=d.get("forced_for_compliance_mode", None),
+            unavailable_for_disabled_entitlement=d.get("unavailable_for_disabled_entitlement", None),
+            unavailable_for_non_enterprise_tier=d.get("unavailable_for_non_enterprise_tier", None),
+        )
 
 
 @dataclass
@@ -376,31 +436,37 @@ def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.week_day_based_schedule:
-            body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict()
+            body["week_day_based_schedule"] = self.week_day_based_schedule.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule
+        if self.week_day_based_schedule:
+            body["week_day_based_schedule"] = self.week_day_based_schedule
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindow:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary."""
-        return cls(week_day_based_schedule=_from_dict(
-            d, 'week_day_based_schedule', ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule))
+        return cls(
+            week_day_based_schedule=_from_dict(
+                d,
+                "week_day_based_schedule",
+                ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule,
+            )
+        )
 
 
 class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum):
 
-    FRIDAY = 'FRIDAY'
-    MONDAY = 'MONDAY'
-    SATURDAY = 'SATURDAY'
-    SUNDAY = 'SUNDAY'
-    THURSDAY = 'THURSDAY'
-    TUESDAY = 'TUESDAY'
-    WEDNESDAY = 'WEDNESDAY'
+    FRIDAY = "FRIDAY"
+    MONDAY = "MONDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+    THURSDAY = "THURSDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"
 
 
 @dataclass
@@ -414,38 +480,56 @@ class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule:
     def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.day_of_week is not None: body['day_of_week'] = self.day_of_week.value
-        if self.frequency is not None: body['frequency'] = self.frequency.value
-        if self.window_start_time: body['window_start_time'] = self.window_start_time.as_dict()
+        if self.day_of_week is not None:
+            body["day_of_week"] = self.day_of_week.value
+        if self.frequency is not None:
+            body["frequency"] = self.frequency.value
+        if self.window_start_time:
+            body["window_start_time"] = self.window_start_time.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.day_of_week is not None: body['day_of_week'] = self.day_of_week
-        if self.frequency is not None: body['frequency'] = self.frequency
-        if self.window_start_time: body['window_start_time'] = self.window_start_time
+        if self.day_of_week is not None:
+            body["day_of_week"] = self.day_of_week
+        if self.frequency is not None:
+            body["frequency"] = self.frequency
+        if self.window_start_time:
+            body["window_start_time"] = self.window_start_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary."""
-        return cls(day_of_week=_enum(d, 'day_of_week', ClusterAutoRestartMessageMaintenanceWindowDayOfWeek),
-                   frequency=_enum(d, 'frequency',
-                                   ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency),
-                   window_start_time=_from_dict(d, 'window_start_time',
-                                                ClusterAutoRestartMessageMaintenanceWindowWindowStartTime))
+        return cls(
+            day_of_week=_enum(
+                d,
+                "day_of_week",
+                ClusterAutoRestartMessageMaintenanceWindowDayOfWeek,
+            ),
+            frequency=_enum(
+                d,
+                "frequency",
+                ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency,
+            ),
+            window_start_time=_from_dict(
+                d,
+                "window_start_time",
+                ClusterAutoRestartMessageMaintenanceWindowWindowStartTime,
+            ),
+        )
 
 
 class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum):
 
-    EVERY_WEEK = 'EVERY_WEEK'
-    FIRST_AND_THIRD_OF_MONTH = 'FIRST_AND_THIRD_OF_MONTH'
-    FIRST_OF_MONTH = 'FIRST_OF_MONTH'
-    FOURTH_OF_MONTH = 'FOURTH_OF_MONTH'
-    SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH'
-    SECOND_OF_MONTH = 'SECOND_OF_MONTH'
-    THIRD_OF_MONTH = 'THIRD_OF_MONTH'
+    EVERY_WEEK = "EVERY_WEEK"
+    FIRST_AND_THIRD_OF_MONTH = "FIRST_AND_THIRD_OF_MONTH"
+    FIRST_OF_MONTH = "FIRST_OF_MONTH"
+    FOURTH_OF_MONTH = "FOURTH_OF_MONTH"
+    SECOND_AND_FOURTH_OF_MONTH = "SECOND_AND_FOURTH_OF_MONTH"
+    SECOND_OF_MONTH = "SECOND_OF_MONTH"
+    THIRD_OF_MONTH = "THIRD_OF_MONTH"
 
 
 @dataclass
@@ -457,21 +541,25 @@ class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime:
     def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.hours is not None: body['hours'] = self.hours
-        if self.minutes is not None: body['minutes'] = self.minutes
+        if self.hours is not None:
+            body["hours"] = self.hours
+        if self.minutes is not None:
+            body["minutes"] = self.minutes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.hours is not None: body['hours'] = self.hours
-        if self.minutes is not None: body['minutes'] = self.minutes
+        if self.hours is not None:
+            body["hours"] = self.hours
+        if self.minutes is not None:
+            body["minutes"] = self.minutes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary."""
-        return cls(hours=d.get('hours', None), minutes=d.get('minutes', None))
+        return cls(hours=d.get("hours", None), minutes=d.get("minutes", None))
 
 
 @dataclass
@@ -487,22 +575,27 @@ def as_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.compliance_standards:
-            body['compliance_standards'] = [v.value for v in self.compliance_standards]
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+            body["compliance_standards"] = [v.value for v in self.compliance_standards]
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        if self.compliance_standards:
+            body["compliance_standards"] = self.compliance_standards
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
         """Deserializes the ComplianceSecurityProfile from a dictionary."""
-        return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard),
-                   is_enabled=d.get('is_enabled', None))
+        return cls(
+            compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard),
+            is_enabled=d.get("is_enabled", None),
+        )
 
 
 @dataclass
@@ -528,47 +621,53 @@ def as_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfileSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.compliance_security_profile_workspace:
-            body[
-                'compliance_security_profile_workspace'] = self.compliance_security_profile_workspace.as_dict(
-                )
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.compliance_security_profile_workspace:
-            body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting:
         """Deserializes the ComplianceSecurityProfileSetting from a dictionary."""
-        return cls(compliance_security_profile_workspace=_from_dict(d,
-                                                                    'compliance_security_profile_workspace',
-                                                                    ComplianceSecurityProfile),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            compliance_security_profile_workspace=_from_dict(
+                d,
+                "compliance_security_profile_workspace",
+                ComplianceSecurityProfile,
+            ),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 class ComplianceStandard(Enum):
     """Compliance stardard for SHIELD customers"""
 
-    CANADA_PROTECTED_B = 'CANADA_PROTECTED_B'
-    CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS'
-    FEDRAMP_HIGH = 'FEDRAMP_HIGH'
-    FEDRAMP_IL5 = 'FEDRAMP_IL5'
-    FEDRAMP_MODERATE = 'FEDRAMP_MODERATE'
-    HIPAA = 'HIPAA'
-    HITRUST = 'HITRUST'
-    IRAP_PROTECTED = 'IRAP_PROTECTED'
-    ISMAP = 'ISMAP'
-    ITAR_EAR = 'ITAR_EAR'
-    NONE = 'NONE'
-    PCI_DSS = 'PCI_DSS'
+    CANADA_PROTECTED_B = "CANADA_PROTECTED_B"
+    CYBER_ESSENTIAL_PLUS = "CYBER_ESSENTIAL_PLUS"
+    FEDRAMP_HIGH = "FEDRAMP_HIGH"
+    FEDRAMP_IL5 = "FEDRAMP_IL5"
+    FEDRAMP_MODERATE = "FEDRAMP_MODERATE"
+    HIPAA = "HIPAA"
+    HITRUST = "HITRUST"
+    IRAP_PROTECTED = "IRAP_PROTECTED"
+    ISMAP = "ISMAP"
+    ITAR_EAR = "ITAR_EAR"
+    NONE = "NONE"
+    PCI_DSS = "PCI_DSS"
 
 
 @dataclass
@@ -586,31 +685,43 @@ class Config:
     def as_dict(self) -> dict:
         """Serializes the Config into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.email: body['email'] = self.email.as_dict()
-        if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict()
-        if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams.as_dict()
-        if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict()
-        if self.slack: body['slack'] = self.slack.as_dict()
+        if self.email:
+            body["email"] = self.email.as_dict()
+        if self.generic_webhook:
+            body["generic_webhook"] = self.generic_webhook.as_dict()
+        if self.microsoft_teams:
+            body["microsoft_teams"] = self.microsoft_teams.as_dict()
+        if self.pagerduty:
+            body["pagerduty"] = self.pagerduty.as_dict()
+        if self.slack:
+            body["slack"] = self.slack.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Config into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.email: body['email'] = self.email
-        if self.generic_webhook: body['generic_webhook'] = self.generic_webhook
-        if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams
-        if self.pagerduty: body['pagerduty'] = self.pagerduty
-        if self.slack: body['slack'] = self.slack
+        if self.email:
+            body["email"] = self.email
+        if self.generic_webhook:
+            body["generic_webhook"] = self.generic_webhook
+        if self.microsoft_teams:
+            body["microsoft_teams"] = self.microsoft_teams
+        if self.pagerduty:
+            body["pagerduty"] = self.pagerduty
+        if self.slack:
+            body["slack"] = self.slack
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Config:
         """Deserializes the Config from a dictionary."""
-        return cls(email=_from_dict(d, 'email', EmailConfig),
-                   generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig),
-                   microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig),
-                   pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig),
-                   slack=_from_dict(d, 'slack', SlackConfig))
+        return cls(
+            email=_from_dict(d, "email", EmailConfig),
+            generic_webhook=_from_dict(d, "generic_webhook", GenericWebhookConfig),
+            microsoft_teams=_from_dict(d, "microsoft_teams", MicrosoftTeamsConfig),
+            pagerduty=_from_dict(d, "pagerduty", PagerdutyConfig),
+            slack=_from_dict(d, "slack", SlackConfig),
+        )
 
 
 @dataclass
@@ -631,25 +742,33 @@ class CreateIpAccessList:
     def as_dict(self) -> dict:
         """Serializes the CreateIpAccessList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses]
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type.value
+        if self.ip_addresses:
+            body["ip_addresses"] = [v for v in self.ip_addresses]
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.ip_addresses:
+            body["ip_addresses"] = self.ip_addresses
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessList:
         """Deserializes the CreateIpAccessList from a dictionary."""
-        return cls(ip_addresses=d.get('ip_addresses', None),
-                   label=d.get('label', None),
-                   list_type=_enum(d, 'list_type', ListType))
+        return cls(
+            ip_addresses=d.get("ip_addresses", None),
+            label=d.get("label", None),
+            list_type=_enum(d, "list_type", ListType),
+        )
 
 
 @dataclass
@@ -662,19 +781,21 @@ class CreateIpAccessListResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateIpAccessListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessListResponse:
         """Deserializes the CreateIpAccessListResponse from a dictionary."""
-        return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo))
+        return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo))
 
 
 @dataclass
@@ -691,21 +812,25 @@ class CreateNetworkConnectivityConfigRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateNetworkConnectivityConfigRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateNetworkConnectivityConfigRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest:
         """Deserializes the CreateNetworkConnectivityConfigRequest from a dictionary."""
-        return cls(name=d.get('name', None), region=d.get('region', None))
+        return cls(name=d.get("name", None), region=d.get("region", None))
 
 
 @dataclass
@@ -719,21 +844,28 @@ class CreateNotificationDestinationRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config: body['config'] = self.config.as_dict()
-        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config: body['config'] = self.config
-        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.config:
+            body["config"] = self.config
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest:
         """Deserializes the CreateNotificationDestinationRequest from a dictionary."""
-        return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None))
+        return cls(
+            config=_from_dict(d, "config", Config),
+            display_name=d.get("display_name", None),
+        )
 
 
 @dataclass
@@ -752,25 +884,33 @@ class CreateOboTokenRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateOboTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.application_id is not None: body['application_id'] = self.application_id
-        if self.comment is not None: body['comment'] = self.comment
-        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        if self.application_id is not None:
+            body["application_id"] = self.application_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.lifetime_seconds is not None:
+            body["lifetime_seconds"] = self.lifetime_seconds
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.application_id is not None: body['application_id'] = self.application_id
-        if self.comment is not None: body['comment'] = self.comment
-        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        if self.application_id is not None:
+            body["application_id"] = self.application_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.lifetime_seconds is not None:
+            body["lifetime_seconds"] = self.lifetime_seconds
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenRequest:
         """Deserializes the CreateOboTokenRequest from a dictionary."""
-        return cls(application_id=d.get('application_id', None),
-                   comment=d.get('comment', None),
-                   lifetime_seconds=d.get('lifetime_seconds', None))
+        return cls(
+            application_id=d.get("application_id", None),
+            comment=d.get("comment", None),
+            lifetime_seconds=d.get("lifetime_seconds", None),
+        )
 
 
 @dataclass
@@ -785,21 +925,28 @@ class CreateOboTokenResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateOboTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info.as_dict()
-        if self.token_value is not None: body['token_value'] = self.token_value
+        if self.token_info:
+            body["token_info"] = self.token_info.as_dict()
+        if self.token_value is not None:
+            body["token_value"] = self.token_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info
-        if self.token_value is not None: body['token_value'] = self.token_value
+        if self.token_info:
+            body["token_info"] = self.token_info
+        if self.token_value is not None:
+            body["token_value"] = self.token_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenResponse:
         """Deserializes the CreateOboTokenResponse from a dictionary."""
-        return cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None))
+        return cls(
+            token_info=_from_dict(d, "token_info", TokenInfo),
+            token_value=d.get("token_value", None),
+        )
 
 
 @dataclass
@@ -817,37 +964,44 @@ class CreatePrivateEndpointRuleRequest:
     def as_dict(self) -> dict:
         """Serializes the CreatePrivateEndpointRuleRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.group_id is not None:
+            body["group_id"] = self.group_id.value
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePrivateEndpointRuleRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.group_id is not None:
+            body["group_id"] = self.group_id
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePrivateEndpointRuleRequest:
         """Deserializes the CreatePrivateEndpointRuleRequest from a dictionary."""
-        return cls(group_id=_enum(d, 'group_id', CreatePrivateEndpointRuleRequestGroupId),
-                   network_connectivity_config_id=d.get('network_connectivity_config_id', None),
-                   resource_id=d.get('resource_id', None))
+        return cls(
+            group_id=_enum(d, "group_id", CreatePrivateEndpointRuleRequestGroupId),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
+            resource_id=d.get("resource_id", None),
+        )
 
 
 class CreatePrivateEndpointRuleRequestGroupId(Enum):
     """The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
-    storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`."""
+    storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+    """
 
-    BLOB = 'blob'
-    DFS = 'dfs'
-    MYSQL_SERVER = 'mysqlServer'
-    SQL_SERVER = 'sqlServer'
+    BLOB = "blob"
+    DFS = "dfs"
+    MYSQL_SERVER = "mysqlServer"
+    SQL_SERVER = "sqlServer"
 
 
 @dataclass
@@ -863,21 +1017,28 @@ class CreateTokenRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.lifetime_seconds is not None:
+            body["lifetime_seconds"] = self.lifetime_seconds
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.lifetime_seconds is not None:
+            body["lifetime_seconds"] = self.lifetime_seconds
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenRequest:
         """Deserializes the CreateTokenRequest from a dictionary."""
-        return cls(comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None))
+        return cls(
+            comment=d.get("comment", None),
+            lifetime_seconds=d.get("lifetime_seconds", None),
+        )
 
 
 @dataclass
@@ -891,22 +1052,28 @@ class CreateTokenResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info.as_dict()
-        if self.token_value is not None: body['token_value'] = self.token_value
+        if self.token_info:
+            body["token_info"] = self.token_info.as_dict()
+        if self.token_value is not None:
+            body["token_value"] = self.token_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info
-        if self.token_value is not None: body['token_value'] = self.token_value
+        if self.token_info:
+            body["token_info"] = self.token_info
+        if self.token_value is not None:
+            body["token_value"] = self.token_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenResponse:
         """Deserializes the CreateTokenResponse from a dictionary."""
-        return cls(token_info=_from_dict(d, 'token_info', PublicTokenInfo),
-                   token_value=d.get('token_value', None))
+        return cls(
+            token_info=_from_dict(d, "token_info", PublicTokenInfo),
+            token_value=d.get("token_value", None),
+        )
 
 
 @dataclass
@@ -924,22 +1091,27 @@ def as_dict(self) -> dict:
         """Serializes the CspEnablementAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.compliance_standards:
-            body['compliance_standards'] = [v.value for v in self.compliance_standards]
-        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+            body["compliance_standards"] = [v.value for v in self.compliance_standards]
+        if self.is_enforced is not None:
+            body["is_enforced"] = self.is_enforced
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
-        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        if self.compliance_standards:
+            body["compliance_standards"] = self.compliance_standards
+        if self.is_enforced is not None:
+            body["is_enforced"] = self.is_enforced
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccount:
         """Deserializes the CspEnablementAccount from a dictionary."""
-        return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard),
-                   is_enforced=d.get('is_enforced', None))
+        return cls(
+            compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard),
+            is_enforced=d.get("is_enforced", None),
+        )
 
 
 @dataclass
@@ -964,25 +1136,33 @@ class CspEnablementAccountSetting:
     def as_dict(self) -> dict:
         """Serializes the CspEnablementAccountSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.csp_enablement_account:
+            body["csp_enablement_account"] = self.csp_enablement_account.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.csp_enablement_account:
+            body["csp_enablement_account"] = self.csp_enablement_account
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccountSetting:
         """Deserializes the CspEnablementAccountSetting from a dictionary."""
-        return cls(csp_enablement_account=_from_dict(d, 'csp_enablement_account', CspEnablementAccount),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            csp_enablement_account=_from_dict(d, "csp_enablement_account", CspEnablementAccount),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1014,25 +1194,33 @@ class DefaultNamespaceSetting:
     def as_dict(self) -> dict:
         """Serializes the DefaultNamespaceSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.namespace: body['namespace'] = self.namespace.as_dict()
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.namespace:
+            body["namespace"] = self.namespace.as_dict()
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.namespace: body['namespace'] = self.namespace
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.namespace:
+            body["namespace"] = self.namespace
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
         """Deserializes the DefaultNamespaceSetting from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   namespace=_from_dict(d, 'namespace', StringMessage),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            etag=d.get("etag", None),
+            namespace=_from_dict(d, "namespace", StringMessage),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1050,19 +1238,21 @@ class DeleteAccountIpAccessEnableResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAccountIpAccessEnableResponse:
         """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1080,19 +1270,21 @@ class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
         """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1110,19 +1302,21 @@ class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
         """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1140,19 +1334,21 @@ class DeleteDefaultNamespaceSettingResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteDefaultNamespaceSettingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse:
         """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1170,19 +1366,21 @@ class DeleteDisableLegacyAccessResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyAccessResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
         """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1200,19 +1398,21 @@ class DeleteDisableLegacyDbfsResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
         """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1230,19 +1430,21 @@ class DeleteDisableLegacyFeaturesResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyFeaturesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse:
         """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1279,19 +1481,21 @@ class DeletePersonalComputeSettingResponse:
     def as_dict(self) -> dict:
         """Serializes the DeletePersonalComputeSettingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePersonalComputeSettingResponse:
         """Deserializes the DeletePersonalComputeSettingResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 @dataclass
@@ -1328,28 +1532,30 @@ class DeleteRestrictWorkspaceAdminsSettingResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingResponse:
         """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary."""
-        return cls(etag=d.get('etag', None))
+        return cls(etag=d.get("etag", None))
 
 
 class DestinationType(Enum):
 
-    EMAIL = 'EMAIL'
-    MICROSOFT_TEAMS = 'MICROSOFT_TEAMS'
-    PAGERDUTY = 'PAGERDUTY'
-    SLACK = 'SLACK'
-    WEBHOOK = 'WEBHOOK'
+    EMAIL = "EMAIL"
+    MICROSOFT_TEAMS = "MICROSOFT_TEAMS"
+    PAGERDUTY = "PAGERDUTY"
+    SLACK = "SLACK"
+    WEBHOOK = "WEBHOOK"
 
 
 @dataclass
@@ -1373,25 +1579,33 @@ class DisableLegacyAccess:
     def as_dict(self) -> dict:
         """Serializes the DisableLegacyAccess into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.disable_legacy_access:
+            body["disable_legacy_access"] = self.disable_legacy_access.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.disable_legacy_access:
+            body["disable_legacy_access"] = self.disable_legacy_access
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
         """Deserializes the DisableLegacyAccess from a dictionary."""
-        return cls(disable_legacy_access=_from_dict(d, 'disable_legacy_access', BooleanMessage),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            disable_legacy_access=_from_dict(d, "disable_legacy_access", BooleanMessage),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1415,25 +1629,33 @@ class DisableLegacyDbfs:
     def as_dict(self) -> dict:
         """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.disable_legacy_dbfs:
+            body["disable_legacy_dbfs"] = self.disable_legacy_dbfs.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.disable_legacy_dbfs:
+            body["disable_legacy_dbfs"] = self.disable_legacy_dbfs
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
         """Deserializes the DisableLegacyDbfs from a dictionary."""
-        return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            disable_legacy_dbfs=_from_dict(d, "disable_legacy_dbfs", BooleanMessage),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1458,25 +1680,32 @@ def as_dict(self) -> dict:
         """Serializes the DisableLegacyFeatures into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.disable_legacy_features:
-            body['disable_legacy_features'] = self.disable_legacy_features.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["disable_legacy_features"] = self.disable_legacy_features.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.disable_legacy_features:
+            body["disable_legacy_features"] = self.disable_legacy_features
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
         """Deserializes the DisableLegacyFeatures from a dictionary."""
-        return cls(disable_legacy_features=_from_dict(d, 'disable_legacy_features', BooleanMessage),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            disable_legacy_features=_from_dict(d, "disable_legacy_features", BooleanMessage),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1491,28 +1720,28 @@ class EgressNetworkPolicy:
     def as_dict(self) -> dict:
         """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.internet_access: body['internet_access'] = self.internet_access.as_dict()
+        if self.internet_access:
+            body["internet_access"] = self.internet_access.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.internet_access: body['internet_access'] = self.internet_access
+        if self.internet_access:
+            body["internet_access"] = self.internet_access
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicy:
         """Deserializes the EgressNetworkPolicy from a dictionary."""
-        return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy))
+        return cls(internet_access=_from_dict(d, "internet_access", EgressNetworkPolicyInternetAccessPolicy))
 
 
 @dataclass
 class EgressNetworkPolicyInternetAccessPolicy:
-    allowed_internet_destinations: Optional[
-        List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None
+    allowed_internet_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None
 
-    allowed_storage_destinations: Optional[
-        List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None
+    allowed_storage_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None
 
     log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None
     """Optional. If not specified, assume the policy is enforced for all workloads."""
@@ -1528,36 +1757,53 @@ def as_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allowed_internet_destinations:
-            body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations]
+            body["allowed_internet_destinations"] = [v.as_dict() for v in self.allowed_internet_destinations]
         if self.allowed_storage_destinations:
-            body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations]
-        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict()
-        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value
+            body["allowed_storage_destinations"] = [v.as_dict() for v in self.allowed_storage_destinations]
+        if self.log_only_mode:
+            body["log_only_mode"] = self.log_only_mode.as_dict()
+        if self.restriction_mode is not None:
+            body["restriction_mode"] = self.restriction_mode.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.allowed_internet_destinations:
-            body['allowed_internet_destinations'] = self.allowed_internet_destinations
+            body["allowed_internet_destinations"] = self.allowed_internet_destinations
         if self.allowed_storage_destinations:
-            body['allowed_storage_destinations'] = self.allowed_storage_destinations
-        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode
-        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode
+            body["allowed_storage_destinations"] = self.allowed_storage_destinations
+        if self.log_only_mode:
+            body["log_only_mode"] = self.log_only_mode
+        if self.restriction_mode is not None:
+            body["restriction_mode"] = self.restriction_mode
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicy:
         """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary."""
-        return cls(allowed_internet_destinations=_repeated_dict(
-            d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination),
-                   allowed_storage_destinations=_repeated_dict(
-                       d, 'allowed_storage_destinations',
-                       EgressNetworkPolicyInternetAccessPolicyStorageDestination),
-                   log_only_mode=_from_dict(d, 'log_only_mode',
-                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyMode),
-                   restriction_mode=_enum(d, 'restriction_mode',
-                                          EgressNetworkPolicyInternetAccessPolicyRestrictionMode))
+        return cls(
+            allowed_internet_destinations=_repeated_dict(
+                d,
+                "allowed_internet_destinations",
+                EgressNetworkPolicyInternetAccessPolicyInternetDestination,
+            ),
+            allowed_storage_destinations=_repeated_dict(
+                d,
+                "allowed_storage_destinations",
+                EgressNetworkPolicyInternetAccessPolicyStorageDestination,
+            ),
+            log_only_mode=_from_dict(
+                d,
+                "log_only_mode",
+                EgressNetworkPolicyInternetAccessPolicyLogOnlyMode,
+            ),
+            restriction_mode=_enum(
+                d,
+                "restriction_mode",
+                EgressNetworkPolicyInternetAccessPolicyRestrictionMode,
+            ),
+        )
 
 
 @dataclass
@@ -1569,7 +1815,8 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestination:
     destination: Optional[str] = None
 
     protocol: Optional[
-        EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None
+        EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+    ] = None
     """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP
     filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be
     set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
@@ -1580,30 +1827,41 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestination:
     def as_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
-        if self.protocol is not None: body['protocol'] = self.protocol.value
-        if self.type is not None: body['type'] = self.type.value
+        if self.destination is not None:
+            body["destination"] = self.destination
+        if self.protocol is not None:
+            body["protocol"] = self.protocol.value
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None: body['destination'] = self.destination
-        if self.protocol is not None: body['protocol'] = self.protocol
-        if self.type is not None: body['type'] = self.type
+        if self.destination is not None:
+            body["destination"] = self.destination
+        if self.protocol is not None:
+            body["protocol"] = self.protocol
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination:
         """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary."""
         return cls(
-            destination=d.get('destination', None),
+            destination=d.get("destination", None),
             protocol=_enum(
-                d, 'protocol',
-                EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+                d,
+                "protocol",
+                EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol,
             ),
-            type=_enum(d, 'type',
-                       EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType))
+            type=_enum(
+                d,
+                "type",
+                EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType,
+            ),
+        )
 
 
 class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum):
@@ -1612,12 +1870,12 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinat
     set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
     filtering (i.e. SNI based filtering, filtering by FQDN)."""
 
-    TCP = 'TCP'
+    TCP = "TCP"
 
 
 class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum):
 
-    FQDN = 'FQDN'
+    FQDN = "FQDN"
 
 
 @dataclass
@@ -1629,37 +1887,49 @@ class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
     def as_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value
-        if self.workloads: body['workloads'] = [v.value for v in self.workloads]
+        if self.log_only_mode_type is not None:
+            body["log_only_mode_type"] = self.log_only_mode_type.value
+        if self.workloads:
+            body["workloads"] = [v.value for v in self.workloads]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type
-        if self.workloads: body['workloads'] = self.workloads
+        if self.log_only_mode_type is not None:
+            body["log_only_mode_type"] = self.log_only_mode_type
+        if self.workloads:
+            body["workloads"] = self.workloads
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
         """Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary."""
-        return cls(log_only_mode_type=_enum(
-            d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType),
-                   workloads=_repeated_enum(d, 'workloads',
-                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType))
+        return cls(
+            log_only_mode_type=_enum(
+                d,
+                "log_only_mode_type",
+                EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType,
+            ),
+            workloads=_repeated_enum(
+                d,
+                "workloads",
+                EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType,
+            ),
+        )
 
 
 class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum):
 
-    ALL_SERVICES = 'ALL_SERVICES'
-    SELECTED_SERVICES = 'SELECTED_SERVICES'
+    ALL_SERVICES = "ALL_SERVICES"
+    SELECTED_SERVICES = "SELECTED_SERVICES"
 
 
 class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum):
     """The values should match the list of workloads used in networkconfig.proto"""
 
-    DBSQL = 'DBSQL'
-    ML_SERVING = 'ML_SERVING'
+    DBSQL = "DBSQL"
+    ML_SERVING = "ML_SERVING"
 
 
 class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum):
@@ -1669,9 +1939,9 @@ class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum):
     external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
     private link."""
 
-    FULL_ACCESS = 'FULL_ACCESS'
-    PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY'
-    RESTRICTED_ACCESS = 'RESTRICTED_ACCESS'
+    FULL_ACCESS = "FULL_ACCESS"
+    PRIVATE_ACCESS_ONLY = "PRIVATE_ACCESS_ONLY"
+    RESTRICTED_ACCESS = "RESTRICTED_ACCESS"
 
 
 @dataclass
@@ -1697,50 +1967,70 @@ class EgressNetworkPolicyInternetAccessPolicyStorageDestination:
     def as_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths]
-        if self.azure_container is not None: body['azure_container'] = self.azure_container
-        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
-        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
-        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
-        if self.region is not None: body['region'] = self.region
-        if self.type is not None: body['type'] = self.type.value
+        if self.allowed_paths:
+            body["allowed_paths"] = [v for v in self.allowed_paths]
+        if self.azure_container is not None:
+            body["azure_container"] = self.azure_container
+        if self.azure_dns_zone is not None:
+            body["azure_dns_zone"] = self.azure_dns_zone
+        if self.azure_storage_account is not None:
+            body["azure_storage_account"] = self.azure_storage_account
+        if self.azure_storage_service is not None:
+            body["azure_storage_service"] = self.azure_storage_service
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allowed_paths: body['allowed_paths'] = self.allowed_paths
-        if self.azure_container is not None: body['azure_container'] = self.azure_container
-        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
-        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
-        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
-        if self.region is not None: body['region'] = self.region
-        if self.type is not None: body['type'] = self.type
+        if self.allowed_paths:
+            body["allowed_paths"] = self.allowed_paths
+        if self.azure_container is not None:
+            body["azure_container"] = self.azure_container
+        if self.azure_dns_zone is not None:
+            body["azure_dns_zone"] = self.azure_dns_zone
+        if self.azure_storage_account is not None:
+            body["azure_storage_account"] = self.azure_storage_account
+        if self.azure_storage_service is not None:
+            body["azure_storage_service"] = self.azure_storage_service
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination:
         """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary."""
-        return cls(allowed_paths=d.get('allowed_paths', None),
-                   azure_container=d.get('azure_container', None),
-                   azure_dns_zone=d.get('azure_dns_zone', None),
-                   azure_storage_account=d.get('azure_storage_account', None),
-                   azure_storage_service=d.get('azure_storage_service', None),
-                   bucket_name=d.get('bucket_name', None),
-                   region=d.get('region', None),
-                   type=_enum(
-                       d, 'type',
-                       EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType))
+        return cls(
+            allowed_paths=d.get("allowed_paths", None),
+            azure_container=d.get("azure_container", None),
+            azure_dns_zone=d.get("azure_dns_zone", None),
+            azure_storage_account=d.get("azure_storage_account", None),
+            azure_storage_service=d.get("azure_storage_service", None),
+            bucket_name=d.get("bucket_name", None),
+            region=d.get("region", None),
+            type=_enum(
+                d,
+                "type",
+                EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType,
+            ),
+        )
 
 
 class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum):
 
-    AWS_S3 = 'AWS_S3'
-    AZURE_STORAGE = 'AZURE_STORAGE'
-    CLOUDFLARE_R2 = 'CLOUDFLARE_R2'
-    GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE'
+    AWS_S3 = "AWS_S3"
+    AZURE_STORAGE = "AZURE_STORAGE"
+    CLOUDFLARE_R2 = "CLOUDFLARE_R2"
+    GOOGLE_CLOUD_STORAGE = "GOOGLE_CLOUD_STORAGE"
 
 
 @dataclass
@@ -1751,19 +2041,21 @@ class EmailConfig:
     def as_dict(self) -> dict:
         """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.addresses: body['addresses'] = [v for v in self.addresses]
+        if self.addresses:
+            body["addresses"] = [v for v in self.addresses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EmailConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.addresses: body['addresses'] = self.addresses
+        if self.addresses:
+            body["addresses"] = self.addresses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmailConfig:
         """Deserializes the EmailConfig from a dictionary."""
-        return cls(addresses=d.get('addresses', None))
+        return cls(addresses=d.get("addresses", None))
 
 
 @dataclass
@@ -1794,19 +2086,21 @@ class EnhancedSecurityMonitoring:
     def as_dict(self) -> dict:
         """Serializes the EnhancedSecurityMonitoring into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        if self.is_enabled is not None:
+            body["is_enabled"] = self.is_enabled
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoring:
         """Deserializes the EnhancedSecurityMonitoring from a dictionary."""
-        return cls(is_enabled=d.get('is_enabled', None))
+        return cls(is_enabled=d.get("is_enabled", None))
 
 
 @dataclass
@@ -1832,29 +2126,36 @@ def as_dict(self) -> dict:
         """Serializes the EnhancedSecurityMonitoringSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.enhanced_security_monitoring_workspace:
-            body[
-                'enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace.as_dict(
-                )
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.enhanced_security_monitoring_workspace:
-            body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoringSetting:
         """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary."""
-        return cls(enhanced_security_monitoring_workspace=_from_dict(
-            d, 'enhanced_security_monitoring_workspace', EnhancedSecurityMonitoring),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            enhanced_security_monitoring_workspace=_from_dict(
+                d,
+                "enhanced_security_monitoring_workspace",
+                EnhancedSecurityMonitoring,
+            ),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1866,19 +2167,21 @@ class EsmEnablementAccount:
     def as_dict(self) -> dict:
         """Serializes the EsmEnablementAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        if self.is_enforced is not None:
+            body["is_enforced"] = self.is_enforced
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        if self.is_enforced is not None:
+            body["is_enforced"] = self.is_enforced
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount:
         """Deserializes the EsmEnablementAccount from a dictionary."""
-        return cls(is_enforced=d.get('is_enforced', None))
+        return cls(is_enforced=d.get("is_enforced", None))
 
 
 @dataclass
@@ -1903,25 +2206,33 @@ class EsmEnablementAccountSetting:
     def as_dict(self) -> dict:
         """Serializes the EsmEnablementAccountSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.esm_enablement_account:
+            body["esm_enablement_account"] = self.esm_enablement_account.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.esm_enablement_account:
+            body["esm_enablement_account"] = self.esm_enablement_account
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting:
         """Deserializes the EsmEnablementAccountSetting from a dictionary."""
-        return cls(esm_enablement_account=_from_dict(d, 'esm_enablement_account', EsmEnablementAccount),
-                   etag=d.get('etag', None),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            esm_enablement_account=_from_dict(d, "esm_enablement_account", EsmEnablementAccount),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -1946,31 +2257,43 @@ class ExchangeToken:
     def as_dict(self) -> dict:
         """Serializes the ExchangeToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential is not None: body['credential'] = self.credential
-        if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time
-        if self.owner_id is not None: body['ownerId'] = self.owner_id
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_type is not None: body['tokenType'] = self.token_type.value
+        if self.credential is not None:
+            body["credential"] = self.credential
+        if self.credential_eol_time is not None:
+            body["credentialEolTime"] = self.credential_eol_time
+        if self.owner_id is not None:
+            body["ownerId"] = self.owner_id
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_type is not None:
+            body["tokenType"] = self.token_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential is not None: body['credential'] = self.credential
-        if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time
-        if self.owner_id is not None: body['ownerId'] = self.owner_id
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_type is not None: body['tokenType'] = self.token_type
+        if self.credential is not None:
+            body["credential"] = self.credential
+        if self.credential_eol_time is not None:
+            body["credentialEolTime"] = self.credential_eol_time
+        if self.owner_id is not None:
+            body["ownerId"] = self.owner_id
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_type is not None:
+            body["tokenType"] = self.token_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeToken:
         """Deserializes the ExchangeToken from a dictionary."""
-        return cls(credential=d.get('credential', None),
-                   credential_eol_time=d.get('credentialEolTime', None),
-                   owner_id=d.get('ownerId', None),
-                   scopes=d.get('scopes', None),
-                   token_type=_enum(d, 'tokenType', TokenType))
+        return cls(
+            credential=d.get("credential", None),
+            credential_eol_time=d.get("credentialEolTime", None),
+            owner_id=d.get("ownerId", None),
+            scopes=d.get("scopes", None),
+            token_type=_enum(d, "tokenType", TokenType),
+        )
 
 
 @dataclass
@@ -1989,25 +2312,33 @@ class ExchangeTokenRequest:
     def as_dict(self) -> dict:
         """Serializes the ExchangeTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.partition_id: body['partitionId'] = self.partition_id.as_dict()
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_type: body['tokenType'] = [v.value for v in self.token_type]
+        if self.partition_id:
+            body["partitionId"] = self.partition_id.as_dict()
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_type:
+            body["tokenType"] = [v.value for v in self.token_type]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.partition_id: body['partitionId'] = self.partition_id
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_type: body['tokenType'] = self.token_type
+        if self.partition_id:
+            body["partitionId"] = self.partition_id
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_type:
+            body["tokenType"] = self.token_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenRequest:
         """Deserializes the ExchangeTokenRequest from a dictionary."""
-        return cls(partition_id=_from_dict(d, 'partitionId', PartitionId),
-                   scopes=d.get('scopes', None),
-                   token_type=_repeated_enum(d, 'tokenType', TokenType))
+        return cls(
+            partition_id=_from_dict(d, "partitionId", PartitionId),
+            scopes=d.get("scopes", None),
+            token_type=_repeated_enum(d, "tokenType", TokenType),
+        )
 
 
 @dataclass
@@ -2019,19 +2350,21 @@ class ExchangeTokenResponse:
     def as_dict(self) -> dict:
         """Serializes the ExchangeTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        if self.values:
+            body["values"] = [v.as_dict() for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values: body['values'] = self.values
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenResponse:
         """Deserializes the ExchangeTokenResponse from a dictionary."""
-        return cls(values=_repeated_dict(d, 'values', ExchangeToken))
+        return cls(values=_repeated_dict(d, "values", ExchangeToken))
 
 
 @dataclass
@@ -2044,19 +2377,21 @@ class FetchIpAccessListResponse:
     def as_dict(self) -> dict:
         """Serializes the FetchIpAccessListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse:
         """Deserializes the FetchIpAccessListResponse from a dictionary."""
-        return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo))
+        return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo))
 
 
 @dataclass
@@ -2082,34 +2417,48 @@ class GenericWebhookConfig:
     def as_dict(self) -> dict:
         """Serializes the GenericWebhookConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.password is not None: body['password'] = self.password
-        if self.password_set is not None: body['password_set'] = self.password_set
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
-        if self.username is not None: body['username'] = self.username
-        if self.username_set is not None: body['username_set'] = self.username_set
+        if self.password is not None:
+            body["password"] = self.password
+        if self.password_set is not None:
+            body["password_set"] = self.password_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
+        if self.username is not None:
+            body["username"] = self.username
+        if self.username_set is not None:
+            body["username_set"] = self.username_set
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.password is not None: body['password'] = self.password
-        if self.password_set is not None: body['password_set'] = self.password_set
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
-        if self.username is not None: body['username'] = self.username
-        if self.username_set is not None: body['username_set'] = self.username_set
+        if self.password is not None:
+            body["password"] = self.password
+        if self.password_set is not None:
+            body["password_set"] = self.password_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
+        if self.username is not None:
+            body["username"] = self.username
+        if self.username_set is not None:
+            body["username_set"] = self.username_set
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig:
         """Deserializes the GenericWebhookConfig from a dictionary."""
-        return cls(password=d.get('password', None),
-                   password_set=d.get('password_set', None),
-                   url=d.get('url', None),
-                   url_set=d.get('url_set', None),
-                   username=d.get('username', None),
-                   username_set=d.get('username_set', None))
+        return cls(
+            password=d.get("password", None),
+            password_set=d.get("password_set", None),
+            url=d.get("url", None),
+            url_set=d.get("url_set", None),
+            username=d.get("username", None),
+            username_set=d.get("username_set", None),
+        )
 
 
 @dataclass
@@ -2120,19 +2469,21 @@ class GetIpAccessListResponse:
     def as_dict(self) -> dict:
         """Serializes the GetIpAccessListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListResponse:
         """Deserializes the GetIpAccessListResponse from a dictionary."""
-        return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo))
+        return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo))
 
 
 @dataclass
@@ -2144,19 +2495,21 @@ class GetIpAccessListsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetIpAccessListsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
+        if self.ip_access_lists:
+            body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        if self.ip_access_lists:
+            body["ip_access_lists"] = self.ip_access_lists
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListsResponse:
         """Deserializes the GetIpAccessListsResponse from a dictionary."""
-        return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo))
+        return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo))
 
 
 @dataclass
@@ -2167,19 +2520,21 @@ class GetTokenPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetTokenPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenPermissionLevelsResponse:
         """Deserializes the GetTokenPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', TokenPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", TokenPermissionsDescription))
 
 
 @dataclass
@@ -2191,19 +2546,21 @@ class GetTokenResponse:
     def as_dict(self) -> dict:
         """Serializes the GetTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info.as_dict()
+        if self.token_info:
+            body["token_info"] = self.token_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_info: body['token_info'] = self.token_info
+        if self.token_info:
+            body["token_info"] = self.token_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenResponse:
         """Deserializes the GetTokenResponse from a dictionary."""
-        return cls(token_info=_from_dict(d, 'token_info', TokenInfo))
+        return cls(token_info=_from_dict(d, "token_info", TokenInfo))
 
 
 @dataclass
@@ -2245,46 +2602,68 @@ class IpAccessListInfo:
     def as_dict(self) -> dict:
         """Serializes the IpAccessListInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.address_count is not None: body['address_count'] = self.address_count
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses]
-        if self.label is not None: body['label'] = self.label
-        if self.list_id is not None: body['list_id'] = self.list_id
-        if self.list_type is not None: body['list_type'] = self.list_type.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.address_count is not None:
+            body["address_count"] = self.address_count
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_addresses:
+            body["ip_addresses"] = [v for v in self.ip_addresses]
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_id is not None:
+            body["list_id"] = self.list_id
+        if self.list_type is not None:
+            body["list_type"] = self.list_type.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.address_count is not None: body['address_count'] = self.address_count
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
-        if self.label is not None: body['label'] = self.label
-        if self.list_id is not None: body['list_id'] = self.list_id
-        if self.list_type is not None: body['list_type'] = self.list_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.address_count is not None:
+            body["address_count"] = self.address_count
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_addresses:
+            body["ip_addresses"] = self.ip_addresses
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_id is not None:
+            body["list_id"] = self.list_id
+        if self.list_type is not None:
+            body["list_type"] = self.list_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IpAccessListInfo:
         """Deserializes the IpAccessListInfo from a dictionary."""
-        return cls(address_count=d.get('address_count', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   enabled=d.get('enabled', None),
-                   ip_addresses=d.get('ip_addresses', None),
-                   label=d.get('label', None),
-                   list_id=d.get('list_id', None),
-                   list_type=_enum(d, 'list_type', ListType),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            address_count=d.get("address_count", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            enabled=d.get("enabled", None),
+            ip_addresses=d.get("ip_addresses", None),
+            label=d.get("label", None),
+            list_id=d.get("list_id", None),
+            list_type=_enum(d, "list_type", ListType),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -2296,19 +2675,21 @@ class ListIpAccessListResponse:
     def as_dict(self) -> dict:
         """Serializes the ListIpAccessListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
+        if self.ip_access_lists:
+            body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        if self.ip_access_lists:
+            body["ip_access_lists"] = self.ip_access_lists
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListIpAccessListResponse:
         """Deserializes the ListIpAccessListResponse from a dictionary."""
-        return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo))
+        return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo))
 
 
 @dataclass
@@ -2322,22 +2703,28 @@ class ListNccAzurePrivateEndpointRulesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListNccAzurePrivateEndpointRulesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items: body['items'] = [v.as_dict() for v in self.items]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.items:
+            body["items"] = [v.as_dict() for v in self.items]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListNccAzurePrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items: body['items'] = self.items
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.items:
+            body["items"] = self.items
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNccAzurePrivateEndpointRulesResponse:
         """Deserializes the ListNccAzurePrivateEndpointRulesResponse from a dictionary."""
-        return cls(items=_repeated_dict(d, 'items', NccAzurePrivateEndpointRule),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            items=_repeated_dict(d, "items", NccAzurePrivateEndpointRule),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -2351,22 +2738,28 @@ class ListNetworkConnectivityConfigurationsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListNetworkConnectivityConfigurationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items: body['items'] = [v.as_dict() for v in self.items]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.items:
+            body["items"] = [v.as_dict() for v in self.items]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items: body['items'] = self.items
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.items:
+            body["items"] = self.items
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsResponse:
         """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary."""
-        return cls(items=_repeated_dict(d, 'items', NetworkConnectivityConfiguration),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            items=_repeated_dict(d, "items", NetworkConnectivityConfiguration),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -2379,22 +2772,28 @@ class ListNotificationDestinationsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse:
         """Deserializes the ListNotificationDestinationsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   results=_repeated_dict(d, 'results', ListNotificationDestinationsResult))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", ListNotificationDestinationsResult),
+        )
 
 
 @dataclass
@@ -2411,25 +2810,33 @@ class ListNotificationDestinationsResult:
     def as_dict(self) -> dict:
         """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_type is not None: body['destination_type'] = self.destination_type.value
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.destination_type is not None:
+            body["destination_type"] = self.destination_type.value
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_type is not None: body['destination_type'] = self.destination_type
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.destination_type is not None:
+            body["destination_type"] = self.destination_type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult:
         """Deserializes the ListNotificationDestinationsResult from a dictionary."""
-        return cls(destination_type=_enum(d, 'destination_type', DestinationType),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None))
+        return cls(
+            destination_type=_enum(d, "destination_type", DestinationType),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+        )
 
 
 @dataclass
@@ -2440,19 +2847,21 @@ class ListPublicTokensResponse:
     def as_dict(self) -> dict:
         """Serializes the ListPublicTokensResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
+        if self.token_infos:
+            body["token_infos"] = [v.as_dict() for v in self.token_infos]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_infos: body['token_infos'] = self.token_infos
+        if self.token_infos:
+            body["token_infos"] = self.token_infos
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPublicTokensResponse:
         """Deserializes the ListPublicTokensResponse from a dictionary."""
-        return cls(token_infos=_repeated_dict(d, 'token_infos', PublicTokenInfo))
+        return cls(token_infos=_repeated_dict(d, "token_infos", PublicTokenInfo))
 
 
 @dataclass
@@ -2465,29 +2874,32 @@ class ListTokensResponse:
     def as_dict(self) -> dict:
         """Serializes the ListTokensResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
+        if self.token_infos:
+            body["token_infos"] = [v.as_dict() for v in self.token_infos]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_infos: body['token_infos'] = self.token_infos
+        if self.token_infos:
+            body["token_infos"] = self.token_infos
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTokensResponse:
         """Deserializes the ListTokensResponse from a dictionary."""
-        return cls(token_infos=_repeated_dict(d, 'token_infos', TokenInfo))
+        return cls(token_infos=_repeated_dict(d, "token_infos", TokenInfo))
 
 
 class ListType(Enum):
     """Type of IP access list. Valid values are as follows and are case-sensitive:
-    
+
     * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-    range. IP addresses in the block list are excluded even if they are included in an allow list."""
+    range. IP addresses in the block list are excluded even if they are included in an allow list.
+    """
 
-    ALLOW = 'ALLOW'
-    BLOCK = 'BLOCK'
+    ALLOW = "ALLOW"
+    BLOCK = "BLOCK"
 
 
 @dataclass
@@ -2501,21 +2913,25 @@ class MicrosoftTeamsConfig:
     def as_dict(self) -> dict:
         """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig:
         """Deserializes the MicrosoftTeamsConfig from a dictionary."""
-        return cls(url=d.get('url', None), url_set=d.get('url_set', None))
+        return cls(url=d.get("url", None), url_set=d.get("url_set", None))
 
 
 @dataclass
@@ -2530,19 +2946,21 @@ class NccAwsStableIpRule:
     def as_dict(self) -> dict:
         """Serializes the NccAwsStableIpRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cidr_blocks: body['cidr_blocks'] = [v for v in self.cidr_blocks]
+        if self.cidr_blocks:
+            body["cidr_blocks"] = [v for v in self.cidr_blocks]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks
+        if self.cidr_blocks:
+            body["cidr_blocks"] = self.cidr_blocks
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAwsStableIpRule:
         """Deserializes the NccAwsStableIpRule from a dictionary."""
-        return cls(cidr_blocks=d.get('cidr_blocks', None))
+        return cls(cidr_blocks=d.get("cidr_blocks", None))
 
 
 @dataclass
@@ -2591,55 +3009,79 @@ class NccAzurePrivateEndpointRule:
     def as_dict(self) -> dict:
         """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_state is not None: body['connection_state'] = self.connection_state.value
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.deactivated is not None: body['deactivated'] = self.deactivated
-        if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.connection_state is not None:
+            body["connection_state"] = self.connection_state.value
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.deactivated is not None:
+            body["deactivated"] = self.deactivated
+        if self.deactivated_at is not None:
+            body["deactivated_at"] = self.deactivated_at
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.group_id is not None:
+            body["group_id"] = self.group_id.value
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
-        if self.rule_id is not None: body['rule_id'] = self.rule_id
-        if self.updated_time is not None: body['updated_time'] = self.updated_time
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
+        if self.rule_id is not None:
+            body["rule_id"] = self.rule_id
+        if self.updated_time is not None:
+            body["updated_time"] = self.updated_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_state is not None: body['connection_state'] = self.connection_state
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.deactivated is not None: body['deactivated'] = self.deactivated
-        if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.connection_state is not None:
+            body["connection_state"] = self.connection_state
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.deactivated is not None:
+            body["deactivated"] = self.deactivated
+        if self.deactivated_at is not None:
+            body["deactivated_at"] = self.deactivated_at
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.group_id is not None:
+            body["group_id"] = self.group_id
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
-        if self.rule_id is not None: body['rule_id'] = self.rule_id
-        if self.updated_time is not None: body['updated_time'] = self.updated_time
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
+        if self.rule_id is not None:
+            body["rule_id"] = self.rule_id
+        if self.updated_time is not None:
+            body["updated_time"] = self.updated_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAzurePrivateEndpointRule:
         """Deserializes the NccAzurePrivateEndpointRule from a dictionary."""
-        return cls(connection_state=_enum(d, 'connection_state', NccAzurePrivateEndpointRuleConnectionState),
-                   creation_time=d.get('creation_time', None),
-                   deactivated=d.get('deactivated', None),
-                   deactivated_at=d.get('deactivated_at', None),
-                   endpoint_name=d.get('endpoint_name', None),
-                   group_id=_enum(d, 'group_id', NccAzurePrivateEndpointRuleGroupId),
-                   network_connectivity_config_id=d.get('network_connectivity_config_id', None),
-                   resource_id=d.get('resource_id', None),
-                   rule_id=d.get('rule_id', None),
-                   updated_time=d.get('updated_time', None))
+        return cls(
+            connection_state=_enum(
+                d,
+                "connection_state",
+                NccAzurePrivateEndpointRuleConnectionState,
+            ),
+            creation_time=d.get("creation_time", None),
+            deactivated=d.get("deactivated", None),
+            deactivated_at=d.get("deactivated_at", None),
+            endpoint_name=d.get("endpoint_name", None),
+            group_id=_enum(d, "group_id", NccAzurePrivateEndpointRuleGroupId),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
+            resource_id=d.get("resource_id", None),
+            rule_id=d.get("rule_id", None),
+            updated_time=d.get("updated_time", None),
+        )
 
 
 class NccAzurePrivateEndpointRuleConnectionState(Enum):
     """The current status of this private endpoint. The private endpoint rules are effective only if
     the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your
     resources in the Azure portal before they take effect.
-    
+
     The possible values are: - INIT: (deprecated) The endpoint has been created and pending
     approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The
     endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED:
@@ -2647,21 +3089,22 @@ class NccAzurePrivateEndpointRuleConnectionState(Enum):
     removed by the private link resource owner, the private endpoint becomes informative and should
     be deleted for clean-up."""
 
-    DISCONNECTED = 'DISCONNECTED'
-    ESTABLISHED = 'ESTABLISHED'
-    INIT = 'INIT'
-    PENDING = 'PENDING'
-    REJECTED = 'REJECTED'
+    DISCONNECTED = "DISCONNECTED"
+    ESTABLISHED = "ESTABLISHED"
+    INIT = "INIT"
+    PENDING = "PENDING"
+    REJECTED = "REJECTED"
 
 
 class NccAzurePrivateEndpointRuleGroupId(Enum):
     """The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
-    storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`."""
+    storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+    """
 
-    BLOB = 'blob'
-    DFS = 'dfs'
-    MYSQL_SERVER = 'mysqlServer'
-    SQL_SERVER = 'sqlServer'
+    BLOB = "blob"
+    DFS = "dfs"
+    MYSQL_SERVER = "mysqlServer"
+    SQL_SERVER = "sqlServer"
 
 
 @dataclass
@@ -2682,25 +3125,33 @@ class NccAzureServiceEndpointRule:
     def as_dict(self) -> dict:
         """Serializes the NccAzureServiceEndpointRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.subnets: body['subnets'] = [v for v in self.subnets]
-        if self.target_region is not None: body['target_region'] = self.target_region
-        if self.target_services: body['target_services'] = [v for v in self.target_services]
+        if self.subnets:
+            body["subnets"] = [v for v in self.subnets]
+        if self.target_region is not None:
+            body["target_region"] = self.target_region
+        if self.target_services:
+            body["target_services"] = [v for v in self.target_services]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.subnets: body['subnets'] = self.subnets
-        if self.target_region is not None: body['target_region'] = self.target_region
-        if self.target_services: body['target_services'] = self.target_services
+        if self.subnets:
+            body["subnets"] = self.subnets
+        if self.target_region is not None:
+            body["target_region"] = self.target_region
+        if self.target_services:
+            body["target_services"] = self.target_services
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAzureServiceEndpointRule:
         """Deserializes the NccAzureServiceEndpointRule from a dictionary."""
-        return cls(subnets=d.get('subnets', None),
-                   target_region=d.get('target_region', None),
-                   target_services=d.get('target_services', None))
+        return cls(
+            subnets=d.get("subnets", None),
+            target_region=d.get("target_region", None),
+            target_services=d.get("target_services", None),
+        )
 
 
 @dataclass
@@ -2720,22 +3171,28 @@ class NccEgressConfig:
     def as_dict(self) -> dict:
         """Serializes the NccEgressConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_rules: body['default_rules'] = self.default_rules.as_dict()
-        if self.target_rules: body['target_rules'] = self.target_rules.as_dict()
+        if self.default_rules:
+            body["default_rules"] = self.default_rules.as_dict()
+        if self.target_rules:
+            body["target_rules"] = self.target_rules.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_rules: body['default_rules'] = self.default_rules
-        if self.target_rules: body['target_rules'] = self.target_rules
+        if self.default_rules:
+            body["default_rules"] = self.default_rules
+        if self.target_rules:
+            body["target_rules"] = self.target_rules
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressConfig:
         """Deserializes the NccEgressConfig from a dictionary."""
-        return cls(default_rules=_from_dict(d, 'default_rules', NccEgressDefaultRules),
-                   target_rules=_from_dict(d, 'target_rules', NccEgressTargetRules))
+        return cls(
+            default_rules=_from_dict(d, "default_rules", NccEgressDefaultRules),
+            target_rules=_from_dict(d, "target_rules", NccEgressTargetRules),
+        )
 
 
 @dataclass
@@ -2755,25 +3212,28 @@ class NccEgressDefaultRules:
     def as_dict(self) -> dict:
         """Serializes the NccEgressDefaultRules into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule.as_dict()
+        if self.aws_stable_ip_rule:
+            body["aws_stable_ip_rule"] = self.aws_stable_ip_rule.as_dict()
         if self.azure_service_endpoint_rule:
-            body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict()
+            body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule
+        if self.aws_stable_ip_rule:
+            body["aws_stable_ip_rule"] = self.aws_stable_ip_rule
         if self.azure_service_endpoint_rule:
-            body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule
+            body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressDefaultRules:
         """Deserializes the NccEgressDefaultRules from a dictionary."""
-        return cls(aws_stable_ip_rule=_from_dict(d, 'aws_stable_ip_rule', NccAwsStableIpRule),
-                   azure_service_endpoint_rule=_from_dict(d, 'azure_service_endpoint_rule',
-                                                          NccAzureServiceEndpointRule))
+        return cls(
+            aws_stable_ip_rule=_from_dict(d, "aws_stable_ip_rule", NccAwsStableIpRule),
+            azure_service_endpoint_rule=_from_dict(d, "azure_service_endpoint_rule", NccAzureServiceEndpointRule),
+        )
 
 
 @dataclass
@@ -2787,21 +3247,22 @@ def as_dict(self) -> dict:
         """Serializes the NccEgressTargetRules into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.azure_private_endpoint_rules:
-            body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules]
+            body["azure_private_endpoint_rules"] = [v.as_dict() for v in self.azure_private_endpoint_rules]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.azure_private_endpoint_rules:
-            body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules
+            body["azure_private_endpoint_rules"] = self.azure_private_endpoint_rules
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressTargetRules:
         """Deserializes the NccEgressTargetRules from a dictionary."""
-        return cls(azure_private_endpoint_rules=_repeated_dict(d, 'azure_private_endpoint_rules',
-                                                               NccAzurePrivateEndpointRule))
+        return cls(
+            azure_private_endpoint_rules=_repeated_dict(d, "azure_private_endpoint_rules", NccAzurePrivateEndpointRule)
+        )
 
 
 @dataclass
@@ -2834,39 +3295,53 @@ class NetworkConnectivityConfiguration:
     def as_dict(self) -> dict:
         """Serializes the NetworkConnectivityConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.egress_config: body['egress_config'] = self.egress_config.as_dict()
-        if self.name is not None: body['name'] = self.name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.egress_config:
+            body["egress_config"] = self.egress_config.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.region is not None: body['region'] = self.region
-        if self.updated_time is not None: body['updated_time'] = self.updated_time
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_time is not None:
+            body["updated_time"] = self.updated_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.egress_config: body['egress_config'] = self.egress_config
-        if self.name is not None: body['name'] = self.name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.egress_config:
+            body["egress_config"] = self.egress_config
+        if self.name is not None:
+            body["name"] = self.name
         if self.network_connectivity_config_id is not None:
-            body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.region is not None: body['region'] = self.region
-        if self.updated_time is not None: body['updated_time'] = self.updated_time
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_time is not None:
+            body["updated_time"] = self.updated_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration:
         """Deserializes the NetworkConnectivityConfiguration from a dictionary."""
-        return cls(account_id=d.get('account_id', None),
-                   creation_time=d.get('creation_time', None),
-                   egress_config=_from_dict(d, 'egress_config', NccEgressConfig),
-                   name=d.get('name', None),
-                   network_connectivity_config_id=d.get('network_connectivity_config_id', None),
-                   region=d.get('region', None),
-                   updated_time=d.get('updated_time', None))
+        return cls(
+            account_id=d.get("account_id", None),
+            creation_time=d.get("creation_time", None),
+            egress_config=_from_dict(d, "egress_config", NccEgressConfig),
+            name=d.get("name", None),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
+            region=d.get("region", None),
+            updated_time=d.get("updated_time", None),
+        )
 
 
 @dataclass
@@ -2887,28 +3362,38 @@ class NotificationDestination:
     def as_dict(self) -> dict:
         """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config: body['config'] = self.config.as_dict()
-        if self.destination_type is not None: body['destination_type'] = self.destination_type.value
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.destination_type is not None:
+            body["destination_type"] = self.destination_type.value
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config: body['config'] = self.config
-        if self.destination_type is not None: body['destination_type'] = self.destination_type
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.config:
+            body["config"] = self.config
+        if self.destination_type is not None:
+            body["destination_type"] = self.destination_type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotificationDestination:
         """Deserializes the NotificationDestination from a dictionary."""
-        return cls(config=_from_dict(d, 'config', Config),
-                   destination_type=_enum(d, 'destination_type', DestinationType),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None))
+        return cls(
+            config=_from_dict(d, "config", Config),
+            destination_type=_enum(d, "destination_type", DestinationType),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+        )
 
 
 @dataclass
@@ -2922,22 +3407,28 @@ class PagerdutyConfig:
     def as_dict(self) -> dict:
         """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_key is not None: body['integration_key'] = self.integration_key
-        if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
+        if self.integration_key is not None:
+            body["integration_key"] = self.integration_key
+        if self.integration_key_set is not None:
+            body["integration_key_set"] = self.integration_key_set
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_key is not None: body['integration_key'] = self.integration_key
-        if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
+        if self.integration_key is not None:
+            body["integration_key"] = self.integration_key
+        if self.integration_key_set is not None:
+            body["integration_key_set"] = self.integration_key_set
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig:
         """Deserializes the PagerdutyConfig from a dictionary."""
-        return cls(integration_key=d.get('integration_key', None),
-                   integration_key_set=d.get('integration_key_set', None))
+        return cls(
+            integration_key=d.get("integration_key", None),
+            integration_key_set=d.get("integration_key_set", None),
+        )
 
 
 @dataclass
@@ -2950,19 +3441,21 @@ class PartitionId:
     def as_dict(self) -> dict:
         """Serializes the PartitionId into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
+        if self.workspace_id is not None:
+            body["workspaceId"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PartitionId into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
+        if self.workspace_id is not None:
+            body["workspaceId"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionId:
         """Deserializes the PartitionId from a dictionary."""
-        return cls(workspace_id=d.get('workspaceId', None))
+        return cls(workspace_id=d.get("workspaceId", None))
 
 
 @dataclass
@@ -2977,19 +3470,21 @@ class PersonalComputeMessage:
     def as_dict(self) -> dict:
         """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value.value
+        if self.value is not None:
+            body["value"] = self.value.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeMessage:
         """Deserializes the PersonalComputeMessage from a dictionary."""
-        return cls(value=_enum(d, 'value', PersonalComputeMessageEnum))
+        return cls(value=_enum(d, "value", PersonalComputeMessageEnum))
 
 
 class PersonalComputeMessageEnum(Enum):
@@ -2999,8 +3494,8 @@ class PersonalComputeMessageEnum(Enum):
     groups to be added to the ACLs of that workspace’s Personal Compute default policy before they
     will be able to create compute resources through that policy."""
 
-    DELEGATE = 'DELEGATE'
-    ON = 'ON'
+    DELEGATE = "DELEGATE"
+    ON = "ON"
 
 
 @dataclass
@@ -3024,25 +3519,33 @@ class PersonalComputeSetting:
     def as_dict(self) -> dict:
         """Serializes the PersonalComputeSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.personal_compute: body['personal_compute'] = self.personal_compute.as_dict()
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.personal_compute:
+            body["personal_compute"] = self.personal_compute.as_dict()
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.personal_compute: body['personal_compute'] = self.personal_compute
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.personal_compute:
+            body["personal_compute"] = self.personal_compute
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeSetting:
         """Deserializes the PersonalComputeSetting from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   personal_compute=_from_dict(d, 'personal_compute', PersonalComputeMessage),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            etag=d.get("etag", None),
+            personal_compute=_from_dict(d, "personal_compute", PersonalComputeMessage),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -3062,28 +3565,38 @@ class PublicTokenInfo:
     def as_dict(self) -> dict:
         """Serializes the PublicTokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublicTokenInfo:
         """Deserializes the PublicTokenInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   creation_time=d.get('creation_time', None),
-                   expiry_time=d.get('expiry_time', None),
-                   token_id=d.get('token_id', None))
+        return cls(
+            comment=d.get("comment", None),
+            creation_time=d.get("creation_time", None),
+            expiry_time=d.get("expiry_time", None),
+            token_id=d.get("token_id", None),
+        )
 
 
 @dataclass
@@ -3110,31 +3623,43 @@ class ReplaceIpAccessList:
     def as_dict(self) -> dict:
         """Serializes the ReplaceIpAccessList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses]
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type.value
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = [v for v in self.ip_addresses]
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = self.ip_addresses
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceIpAccessList:
         """Deserializes the ReplaceIpAccessList from a dictionary."""
-        return cls(enabled=d.get('enabled', None),
-                   ip_access_list_id=d.get('ip_access_list_id', None),
-                   ip_addresses=d.get('ip_addresses', None),
-                   label=d.get('label', None),
-                   list_type=_enum(d, 'list_type', ListType))
+        return cls(
+            enabled=d.get("enabled", None),
+            ip_access_list_id=d.get("ip_access_list_id", None),
+            ip_addresses=d.get("ip_addresses", None),
+            label=d.get("label", None),
+            list_type=_enum(d, "list_type", ListType),
+        )
 
 
 @dataclass
@@ -3163,25 +3688,27 @@ class RestrictWorkspaceAdminsMessage:
     def as_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.status is not None: body['status'] = self.status.value
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.status is not None: body['status'] = self.status
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsMessage:
         """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary."""
-        return cls(status=_enum(d, 'status', RestrictWorkspaceAdminsMessageStatus))
+        return cls(status=_enum(d, "status", RestrictWorkspaceAdminsMessageStatus))
 
 
 class RestrictWorkspaceAdminsMessageStatus(Enum):
 
-    ALLOW_ALL = 'ALLOW_ALL'
-    RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS'
+    ALLOW_ALL = "ALLOW_ALL"
+    RESTRICT_TOKENS_AND_JOB_RUN_AS = "RESTRICT_TOKENS_AND_JOB_RUN_AS"
 
 
 @dataclass
@@ -3205,27 +3732,33 @@ class RestrictWorkspaceAdminsSetting:
     def as_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
+        if self.etag is not None:
+            body["etag"] = self.etag
         if self.restrict_workspace_admins:
-            body['restrict_workspace_admins'] = self.restrict_workspace_admins.as_dict()
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+            body["restrict_workspace_admins"] = self.restrict_workspace_admins.as_dict()
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.restrict_workspace_admins:
+            body["restrict_workspace_admins"] = self.restrict_workspace_admins
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsSetting:
         """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   restrict_workspace_admins=_from_dict(d, 'restrict_workspace_admins',
-                                                        RestrictWorkspaceAdminsMessage),
-                   setting_name=d.get('setting_name', None))
+        return cls(
+            etag=d.get("etag", None),
+            restrict_workspace_admins=_from_dict(d, "restrict_workspace_admins", RestrictWorkspaceAdminsMessage),
+            setting_name=d.get("setting_name", None),
+        )
 
 
 @dataclass
@@ -3236,19 +3769,21 @@ class RevokeTokenRequest:
     def as_dict(self) -> dict:
         """Serializes the RevokeTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RevokeTokenRequest:
         """Deserializes the RevokeTokenRequest from a dictionary."""
-        return cls(token_id=d.get('token_id', None))
+        return cls(token_id=d.get("token_id", None))
 
 
 @dataclass
@@ -3300,21 +3835,25 @@ class SlackConfig:
     def as_dict(self) -> dict:
         """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SlackConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SlackConfig:
         """Deserializes the SlackConfig from a dictionary."""
-        return cls(url=d.get('url', None), url_set=d.get('url_set', None))
+        return cls(url=d.get("url", None), url_set=d.get("url_set", None))
 
 
 @dataclass
@@ -3325,19 +3864,21 @@ class StringMessage:
     def as_dict(self) -> dict:
         """Serializes the StringMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StringMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StringMessage:
         """Deserializes the StringMessage from a dictionary."""
-        return cls(value=d.get('value', None))
+        return cls(value=d.get("value", None))
 
 
 @dataclass
@@ -3357,30 +3898,38 @@ class TokenAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the TokenAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlRequest:
         """Deserializes the TokenAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', TokenPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", TokenPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -3403,33 +3952,43 @@ class TokenAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the TokenAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlResponse:
         """Deserializes the TokenAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', TokenPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", TokenPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -3464,43 +4023,63 @@ class TokenInfo:
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
-        if self.owner_id is not None: body['owner_id'] = self.owner_id
-        if self.token_id is not None: body['token_id'] = self.token_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.created_by_username is not None:
+            body["created_by_username"] = self.created_by_username
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.last_used_day is not None:
+            body["last_used_day"] = self.last_used_day
+        if self.owner_id is not None:
+            body["owner_id"] = self.owner_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
-        if self.owner_id is not None: body['owner_id'] = self.owner_id
-        if self.token_id is not None: body['token_id'] = self.token_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.created_by_username is not None:
+            body["created_by_username"] = self.created_by_username
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.last_used_day is not None:
+            body["last_used_day"] = self.last_used_day
+        if self.owner_id is not None:
+            body["owner_id"] = self.owner_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_by_id=d.get('created_by_id', None),
-                   created_by_username=d.get('created_by_username', None),
-                   creation_time=d.get('creation_time', None),
-                   expiry_time=d.get('expiry_time', None),
-                   last_used_day=d.get('last_used_day', None),
-                   owner_id=d.get('owner_id', None),
-                   token_id=d.get('token_id', None),
-                   workspace_id=d.get('workspace_id', None))
+        return cls(
+            comment=d.get("comment", None),
+            created_by_id=d.get("created_by_id", None),
+            created_by_username=d.get("created_by_username", None),
+            creation_time=d.get("creation_time", None),
+            expiry_time=d.get("expiry_time", None),
+            last_used_day=d.get("last_used_day", None),
+            owner_id=d.get("owner_id", None),
+            token_id=d.get("token_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
 
 
 @dataclass
@@ -3515,31 +4094,39 @@ class TokenPermission:
     def as_dict(self) -> dict:
         """Serializes the TokenPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermission:
         """Deserializes the TokenPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', TokenPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", TokenPermissionLevel),
+        )
 
 
 class TokenPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_USE = 'CAN_USE'
+    CAN_USE = "CAN_USE"
 
 
 @dataclass
@@ -3554,25 +4141,32 @@ def as_dict(self) -> dict:
         """Serializes the TokenPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissions:
         """Deserializes the TokenPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -3585,22 +4179,28 @@ class TokenPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the TokenPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsDescription:
         """Deserializes the TokenPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', TokenPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", TokenPermissionLevel),
+        )
 
 
 @dataclass
@@ -3611,27 +4211,28 @@ def as_dict(self) -> dict:
         """Serializes the TokenPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest:
         """Deserializes the TokenPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlRequest))
+        return cls(access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlRequest))
 
 
 class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
 
-    ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
-    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY'
-    AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+    AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
 
 @dataclass
@@ -3657,25 +4258,33 @@ class UpdateAccountIpAccessEnableRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAccountIpAccessEnableRequest:
         """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', AccountIpAccessEnable))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AccountIpAccessEnable),
+        )
 
 
 @dataclass
@@ -3701,25 +4310,33 @@ class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AibiDashboardEmbeddingAccessPolicySetting),
+        )
 
 
 @dataclass
@@ -3745,25 +4362,33 @@ class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AibiDashboardEmbeddingApprovedDomainsSetting),
+        )
 
 
 @dataclass
@@ -3789,25 +4414,33 @@ class UpdateAutomaticClusterUpdateSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAutomaticClusterUpdateSettingRequest:
         """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', AutomaticClusterUpdateSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AutomaticClusterUpdateSetting),
+        )
 
 
 @dataclass
@@ -3833,25 +4466,33 @@ class UpdateComplianceSecurityProfileSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComplianceSecurityProfileSettingRequest:
         """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', ComplianceSecurityProfileSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", ComplianceSecurityProfileSetting),
+        )
 
 
 @dataclass
@@ -3877,25 +4518,33 @@ class UpdateCspEnablementAccountSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest:
         """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', CspEnablementAccountSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", CspEnablementAccountSetting),
+        )
 
 
 @dataclass
@@ -3928,25 +4577,33 @@ class UpdateDefaultNamespaceSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest:
         """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', DefaultNamespaceSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DefaultNamespaceSetting),
+        )
 
 
 @dataclass
@@ -3972,25 +4629,33 @@ class UpdateDisableLegacyAccessRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
         """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', DisableLegacyAccess))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyAccess),
+        )
 
 
 @dataclass
@@ -4016,25 +4681,33 @@ class UpdateDisableLegacyDbfsRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
         """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', DisableLegacyDbfs))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyDbfs),
+        )
 
 
 @dataclass
@@ -4060,25 +4733,33 @@ class UpdateDisableLegacyFeaturesRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest:
         """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', DisableLegacyFeatures))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyFeatures),
+        )
 
 
 @dataclass
@@ -4104,25 +4785,33 @@ class UpdateEnhancedSecurityMonitoringSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEnhancedSecurityMonitoringSettingRequest:
         """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', EnhancedSecurityMonitoringSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", EnhancedSecurityMonitoringSetting),
+        )
 
 
 @dataclass
@@ -4148,25 +4837,33 @@ class UpdateEsmEnablementAccountSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest:
         """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', EsmEnablementAccountSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", EsmEnablementAccountSetting),
+        )
 
 
 @dataclass
@@ -4193,31 +4890,43 @@ class UpdateIpAccessList:
     def as_dict(self) -> dict:
         """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses]
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type.value
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = [v for v in self.ip_addresses]
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = self.ip_addresses
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList:
         """Deserializes the UpdateIpAccessList from a dictionary."""
-        return cls(enabled=d.get('enabled', None),
-                   ip_access_list_id=d.get('ip_access_list_id', None),
-                   ip_addresses=d.get('ip_addresses', None),
-                   label=d.get('label', None),
-                   list_type=_enum(d, 'list_type', ListType))
+        return cls(
+            enabled=d.get("enabled", None),
+            ip_access_list_id=d.get("ip_access_list_id", None),
+            ip_addresses=d.get("ip_addresses", None),
+            label=d.get("label", None),
+            list_type=_enum(d, "list_type", ListType),
+        )
 
 
 @dataclass
@@ -4234,25 +4943,33 @@ class UpdateNotificationDestinationRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config: body['config'] = self.config.as_dict()
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config: body['config'] = self.config
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.config:
+            body["config"] = self.config
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest:
         """Deserializes the UpdateNotificationDestinationRequest from a dictionary."""
-        return cls(config=_from_dict(d, 'config', Config),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None))
+        return cls(
+            config=_from_dict(d, "config", Config),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+        )
 
 
 @dataclass
@@ -4278,25 +4995,33 @@ class UpdatePersonalComputeSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalComputeSettingRequest:
         """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', PersonalComputeSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", PersonalComputeSetting),
+        )
 
 
 @dataclass
@@ -4341,25 +5066,33 @@ class UpdateRestrictWorkspaceAdminsSettingRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRestrictWorkspaceAdminsSettingRequest:
         """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None),
-                   field_mask=d.get('field_mask', None),
-                   setting=_from_dict(d, 'setting', RestrictWorkspaceAdminsSetting))
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", RestrictWorkspaceAdminsSetting),
+        )
 
 
 WorkspaceConf = Dict[str, str]
@@ -4368,132 +5101,157 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRestrictWorkspaceAdminsSettingReq
 class AccountIpAccessListsAPI:
     """The Accounts IP Access List API enables account admins to configure IP access lists for access to the
     account console.
-    
+
     Account IP Access Lists affect web application access and REST API access to the account console and
     account APIs. If the feature is disabled for the account, all access is allowed for this account. There is
     support for allow lists (inclusion) and block lists (exclusion).
-    
+
     When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
     matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
     lists**, the IP address is compared with the allow lists.
-    
+
     If there is at least one allow list for the account, the connection is allowed only if the IP address
     matches an allow list. If there are no allow lists for the account, all IP addresses are allowed.
-    
+
     For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where
     one CIDR counts as a single value.
-    
-    After changes to the account-level IP access lists, it can take a few minutes for changes to take effect."""
+
+    After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               label: str,
-               list_type: ListType,
-               *,
-               ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse:
+    def create(
+        self,
+        label: str,
+        list_type: ListType,
+        *,
+        ip_addresses: Optional[List[str]] = None,
+    ) -> CreateIpAccessListResponse:
         """Create access list.
-        
+
         Creates an IP access list for the account.
-        
+
         A list can be an allow list or a block list. See the top of this file for a description of how the
         server treats allow lists and block lists at runtime.
-        
+
         When creating or updating an IP access list:
-        
+
         * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
         where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
         `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
         error 400 is returned with `error_code` value `INVALID_STATE`.
-        
+
         It can take a few minutes for the changes to take effect.
-        
+
         :param label: str
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType`
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
         :param ip_addresses: List[str] (optional)
-        
+
         :returns: :class:`CreateIpAccessListResponse`
         """
         body = {}
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists",
+            body=body,
+            headers=headers,
+        )
         return CreateIpAccessListResponse.from_dict(res)
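
A minimal usage sketch of this create call, assuming an AccountClient already configured with account credentials and assuming it exposes this service as `a.ip_access_lists`:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.settings import ListType

    a = AccountClient()  # host, account_id and auth are assumed to come from the environment
    created = a.ip_access_lists.create(
        label="corp-vpn",
        list_type=ListType.ALLOW,
        ip_addresses=["203.0.113.0/24"],
    )
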
 
     def delete(self, ip_access_list_id: str):
         """Delete access list.
-        
+
         Deletes an IP access list, specified by its list ID.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE',
-                     f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}',
-                     headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}",
+            headers=headers,
+        )
 
     def get(self, ip_access_list_id: str) -> GetIpAccessListResponse:
         """Get IP access list.
-        
+
         Gets an IP access list, specified by its list ID.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
-        
+
         :returns: :class:`GetIpAccessListResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}",
+            headers=headers,
+        )
         return GetIpAccessListResponse.from_dict(res)
 
     def list(self) -> Iterator[IpAccessListInfo]:
         """Get access lists.
-        
+
         Gets all IP access lists for the specified account.
-        
+
         :returns: Iterator over :class:`IpAccessListInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET',
-                            f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists',
-                            headers=headers)
+        json = self._api.do(
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists",
+            headers=headers,
+        )
         parsed = GetIpAccessListsResponse.from_dict(json).ip_access_lists
         return parsed if parsed is not None else []
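
And a sketch of enumerating the account's lists and fetching one by ID, under the same client assumption; the `label`, `list_id` and `enabled` attribute names on IpAccessListInfo are assumptions taken from the generated dataclass:

    from databricks.sdk import AccountClient

    a = AccountClient()
    for info in a.ip_access_lists.list():
        print(info.label, info.list_id, info.enabled)

    one = a.ip_access_lists.get(ip_access_list_id="<list-id>")
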
 
-    def replace(self,
-                ip_access_list_id: str,
-                label: str,
-                list_type: ListType,
-                enabled: bool,
-                *,
-                ip_addresses: Optional[List[str]] = None):
+    def replace(
+        self,
+        ip_access_list_id: str,
+        label: str,
+        list_type: ListType,
+        enabled: bool,
+        *,
+        ip_addresses: Optional[List[str]] = None,
+    ):
         """Replace access list.
-        
+
         Replaces an IP access list, specified by its ID.
-        
+
         A list can be an allow list or a block list. See the top of this file for a description of how the
         server treats allow lists and block lists at run time. When replacing an IP access list: * For all
         allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
@@ -4501,57 +5259,68 @@ def replace(self,
         `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
         returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
         effect.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
         :param label: str
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType`
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
         :param enabled: bool
           Specifies whether this IP access list is enabled.
         :param ip_addresses: List[str] (optional)
-        
-        
+
+
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PUT',
-                     f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}',
-                     body=body,
-                     headers=headers)
-
-    def update(self,
-               ip_access_list_id: str,
-               *,
-               enabled: Optional[bool] = None,
-               ip_addresses: Optional[List[str]] = None,
-               label: Optional[str] = None,
-               list_type: Optional[ListType] = None):
+        if enabled is not None:
+            body["enabled"] = enabled
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}",
+            body=body,
+            headers=headers,
+        )
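
A sketch of a full replacement; unlike the partial update below, replace overwrites the whole list and requires every non-optional field (same client assumption as above):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.settings import ListType

    a = AccountClient()
    a.ip_access_lists.replace(
        ip_access_list_id="<list-id>",
        label="blocked-ranges",
        list_type=ListType.BLOCK,
        enabled=True,
        ip_addresses=["198.51.100.0/24"],
    )
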
+
+    def update(
+        self,
+        ip_access_list_id: str,
+        *,
+        enabled: Optional[bool] = None,
+        ip_addresses: Optional[List[str]] = None,
+        label: Optional[str] = None,
+        list_type: Optional[ListType] = None,
+    ):
         """Update access list.
-        
+
         Updates an existing IP access list, specified by its ID.
-        
+
         A list can be an allow list or a block list. See the top of this file for a description of how the
         server treats allow lists and block lists at run time.
-        
+
         When updating an IP access list:
-        
+
         * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
         where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
         `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
         error 400 is returned with `error_code` value `INVALID_STATE`.
-        
+
         It can take a few minutes for the changes to take effect.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
         :param enabled: bool (optional)
@@ -4561,23 +5330,32 @@ def update(self,
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType` (optional)
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
-        
-        
+
+
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if enabled is not None:
+            body["enabled"] = enabled
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH',
-                     f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}',
-                     body=body,
-                     headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}",
+            body=body,
+            headers=headers,
+        )
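
By contrast, update is a partial PATCH; a sketch that only disables an existing list and leaves its other fields untouched (same client assumption):

    from databricks.sdk import AccountClient

    a = AccountClient()
    a.ip_access_lists.update(ip_access_list_id="<list-id>", enabled=False)
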
 
 
 class AccountSettingsAPI:
@@ -4620,70 +5398,83 @@ def personal_compute(self) -> PersonalComputeAPI:
 
 class AibiDashboardEmbeddingAccessPolicyAPI:
     """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
-    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS)."""
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def delete(self,
-               *,
-               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+    def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
         """Delete the AI/BI dashboard embedding access policy.
-        
+
         Delete the AI/BI dashboard embedding access policy, reverting to the default.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting:
         """Retrieve the AI/BI dashboard embedding access policy.
-        
+
         Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
         permitting AI/BI dashboards to be embedded on approved domains.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default",
+            query=query,
+            headers=headers,
+        )
         return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting,
-               field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: AibiDashboardEmbeddingAccessPolicySetting,
+        field_mask: str,
+    ) -> AibiDashboardEmbeddingAccessPolicySetting:
         """Update the AI/BI dashboard embedding access policy.
-        
+
         Updates the AI/BI dashboard embedding access policy at the workspace level.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
@@ -4693,93 +5484,114 @@ def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolic
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default",
+            body=body,
+            headers=headers,
+        )
         return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
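
# A hedged sketch of the update flow documented above: read the current setting,
# modify it, and PATCH with an explicit field mask rather than "*". The accessor
# name and the field path in `field_mask` are assumptions about the generated
# models.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
policy_api = w.settings.aibi_dashboard_embedding_access_policy
setting = policy_api.get()
# ...mutate `setting` here before sending it back...
policy_api.update(
    allow_missing=True,  # always True per the docstring
    setting=setting,
    field_mask="aibi_dashboard_embedding_access_policy.access_policy_type",  # assumed path
)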
 
 
 class AibiDashboardEmbeddingApprovedDomainsAPI:
     """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
-    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS."""
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def delete(self,
-               *,
-               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+    def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
         """Delete AI/BI dashboard embedding approved domains.
-        
+
         Delete the list of domains approved to host embedded AI/BI dashboards, reverting to the default
         empty list.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting:
         """Retrieve the list of domains approved to host embedded AI/BI dashboards.
-        
+
         Retrieves the list of domains approved to host embedded AI/BI dashboards.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default",
+            query=query,
+            headers=headers,
+        )
         return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting,
-               field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: AibiDashboardEmbeddingApprovedDomainsSetting,
+        field_mask: str,
+    ) -> AibiDashboardEmbeddingApprovedDomainsSetting:
         """Update the list of domains approved to host embedded AI/BI dashboards.
-        
+
         Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
         current workspace access policy is not ALLOW_APPROVED_DOMAINS.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
@@ -4789,23 +5601,31 @@ def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDom
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default",
+            body=body,
+            headers=headers,
+        )
         return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
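
# A hedged sketch for the approved-domains list: per the class docstring, this
# update only succeeds while the workspace access policy is ALLOW_APPROVED_DOMAINS.
# The accessor name and the field path on the setting are assumptions.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
domains_api = w.settings.aibi_dashboard_embedding_approved_domains
setting = domains_api.get()
# setting.aibi_dashboard_embedding_approved_domains.approved_domains = ["example.com"]  # assumed field path
domains_api.update(
    allow_missing=True,
    setting=setting,
    field_mask="aibi_dashboard_embedding_approved_domains.approved_domains",  # assumed path
)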
 
 
@@ -4818,38 +5638,47 @@ def __init__(self, api_client):
 
     def get(self, *, etag: Optional[str] = None) -> AutomaticClusterUpdateSetting:
         """Get the automatic cluster update setting.
-        
+
         Gets the automatic cluster update setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`AutomaticClusterUpdateSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/automatic_cluster_update/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/automatic_cluster_update/names/default",
+            query=query,
+            headers=headers,
+        )
         return AutomaticClusterUpdateSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting,
-               field_mask: str) -> AutomaticClusterUpdateSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: AutomaticClusterUpdateSetting,
+        field_mask: str,
+    ) -> AutomaticClusterUpdateSetting:
         """Update the automatic cluster update setting.
-        
+
         Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in
         `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request
         before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
         request must be retried by using the fresh etag in the 409 response.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`AutomaticClusterUpdateSetting`
@@ -4859,30 +5688,38 @@ def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`AutomaticClusterUpdateSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/automatic_cluster_update/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/automatic_cluster_update/names/default",
+            body=body,
+            headers=headers,
+        )
         return AutomaticClusterUpdateSetting.from_dict(res)
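
# A sketch of the etag/409 retry loop the docstring describes: read for a fresh
# etag, PATCH, and on a conflict re-read and try again. Using `ResourceConflict`
# as the SDK's 409 error class is an assumption; the field mask is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()
acu_api = w.settings.automatic_cluster_update  # accessor name assumed
for _ in range(3):
    setting = acu_api.get()  # the returned setting carries the fresh etag
    try:
        acu_api.update(allow_missing=True, setting=setting, field_mask="<fields being updated>")
        break
    except ResourceConflict:
        continue  # 409: another writer won; retry with a freshly read etag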
 
 
 class ComplianceSecurityProfileAPI:
     """Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
     workspace is permanent. By default, it is turned off.
-    
+
     This setting can NOT be disabled once it is enabled."""
 
     def __init__(self, api_client):
@@ -4890,38 +5727,47 @@ def __init__(self, api_client):
 
     def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting:
         """Get the compliance security profile setting.
-        
+
         Gets the compliance security profile setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`ComplianceSecurityProfileSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default",
+            query=query,
+            headers=headers,
+        )
         return ComplianceSecurityProfileSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting,
-               field_mask: str) -> ComplianceSecurityProfileSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: ComplianceSecurityProfileSetting,
+        field_mask: str,
+    ) -> ComplianceSecurityProfileSetting:
         """Update the compliance security profile setting.
-        
+
         Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
         in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
         request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
         the request must be retried by using the fresh etag in the 409 response.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`ComplianceSecurityProfileSetting`
@@ -4931,23 +5777,31 @@ def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`ComplianceSecurityProfileSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default",
+            body=body,
+            headers=headers,
+        )
         return ComplianceSecurityProfileSetting.from_dict(res)
 
 
@@ -4958,32 +5812,44 @@ class CredentialsManagerAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def exchange_token(self, partition_id: PartitionId, token_type: List[TokenType],
-                       scopes: List[str]) -> ExchangeTokenResponse:
+    def exchange_token(
+        self,
+        partition_id: PartitionId,
+        token_type: List[TokenType],
+        scopes: List[str],
+    ) -> ExchangeTokenResponse:
         """Exchange token.
-        
+
         Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
         determine token permissions.
-        
+
         :param partition_id: :class:`PartitionId`
           The partition of Credentials store
         :param token_type: List[:class:`TokenType`]
           A list of token types being requested
         :param scopes: List[str]
           Array of scopes for the token request.
-        
+
         :returns: :class:`ExchangeTokenResponse`
         """
         body = {}
-        if partition_id is not None: body['partitionId'] = partition_id.as_dict()
-        if scopes is not None: body['scopes'] = [v for v in scopes]
-        if token_type is not None: body['tokenType'] = [v.value for v in token_type]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if partition_id is not None:
+            body["partitionId"] = partition_id.as_dict()
+        if scopes is not None:
+            body["scopes"] = [v for v in scopes]
+        if token_type is not None:
+            body["tokenType"] = [v.value for v in token_type]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           '/api/2.0/credentials-manager/exchange-tokens/token',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/credentials-manager/exchange-tokens/token",
+            body=body,
+            headers=headers,
+        )
         return ExchangeTokenResponse.from_dict(res)
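
# A hedged sketch of exchange_token. The PartitionId field name and the TokenType
# member used below are assumptions about the generated models; substitute the
# values that apply to your workspace.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import PartitionId, TokenType

w = WorkspaceClient()
resp = w.credentials_manager.exchange_token(  # accessor name assumed
    partition_id=PartitionId(workspace_id=1234567890123456),  # field name assumed
    token_type=[TokenType.AZURE_ACTIVE_DIRECTORY_TOKEN],  # enum member assumed
    scopes=["clusters"],
)
# `resp` is an ExchangeTokenResponse; inspect it for the exchanged token(s).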
 
 
@@ -4991,7 +5857,7 @@ class CspEnablementAccountAPI:
     """The compliance security profile settings at the account level control whether to enable it for new
     workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
     creation, account admins can enable the compliance security profile individually for each workspace.
-    
+
     This setting can be disabled so that new workspaces do not have the compliance security profile enabled by
     default."""
 
@@ -5000,36 +5866,44 @@ def __init__(self, api_client):
 
     def get(self, *, etag: Optional[str] = None) -> CspEnablementAccountSetting:
         """Get the compliance security profile setting for new workspaces.
-        
+
         Gets the compliance security profile setting for new workspaces.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`CspEnablementAccountSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default',
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return CspEnablementAccountSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: CspEnablementAccountSetting,
-               field_mask: str) -> CspEnablementAccountSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: CspEnablementAccountSetting,
+        field_mask: str,
+    ) -> CspEnablementAccountSetting:
         """Update the compliance security profile setting for new workspaces.
-        
+
         Updates the value of the compliance security profile setting for new workspaces.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`CspEnablementAccountSetting`
@@ -5039,36 +5913,43 @@ def update(self, allow_missing: bool, setting: CspEnablementAccountSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`CspEnablementAccountSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return CspEnablementAccountSetting.from_dict(res)
 
 
 class DefaultNamespaceAPI:
     """The default namespace setting API allows users to configure the default namespace for a Databricks
     workspace.
-    
+
     Through this API, users can retrieve, set, or modify the default namespace used when queries do not
     reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
     default catalog, then a query 'SELECT * FROM myTable' would reference the object
     'retail_prod.default.myTable' (the schema 'default' is always assumed).
-    
+
     This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
     namespace only applies when using Unity Catalog-enabled compute."""
 
@@ -5077,68 +5958,82 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse:
         """Delete the default namespace setting.
-        
+
         Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE`
         requests (as a query parameter). The etag can be retrieved by making a `GET` request before the
         `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the
         request must be retried by using the fresh etag in the 409 response.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteDefaultNamespaceSettingResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/default_namespace_ws/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/default_namespace_ws/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteDefaultNamespaceSettingResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> DefaultNamespaceSetting:
         """Get the default namespace setting.
-        
+
         Gets the default namespace setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DefaultNamespaceSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/default_namespace_ws/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/default_namespace_ws/names/default",
+            query=query,
+            headers=headers,
+        )
         return DefaultNamespaceSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
-               field_mask: str) -> DefaultNamespaceSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: DefaultNamespaceSetting,
+        field_mask: str,
+    ) -> DefaultNamespaceSetting:
         """Update the default namespace setting.
-        
+
         Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH`
         requests (as part of the setting field). The etag can be retrieved by making a `GET` request before
         the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the
         etag is present in the error response, which should be set in the `PATCH` request. If the setting is
         updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag
         in the 409 response.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`DefaultNamespaceSetting`
@@ -5155,29 +6050,37 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`DefaultNamespaceSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/default_namespace_ws/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/default_namespace_ws/names/default",
+            body=body,
+            headers=headers,
+        )
         return DefaultNamespaceSetting.from_dict(res)
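
# A sketch of the 'retail_prod' example from the class docstring. StringMessage
# and the "namespace.value" field mask are assumptions about the generated models.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import DefaultNamespaceSetting, StringMessage

w = WorkspaceClient()
ns_api = w.settings.default_namespace  # accessor name assumed
ns_api.update(
    allow_missing=True,
    setting=DefaultNamespaceSetting(namespace=StringMessage(value="retail_prod")),
    field_mask="namespace.value",
)
# Clusters and SQL warehouses must be restarted before the new default applies.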
 
 
 class DisableLegacyAccessAPI:
     """'Disabling legacy access' has the following impacts:
-    
+
     1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
     Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
     Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
@@ -5188,60 +6091,74 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse:
         """Delete Legacy Access Disablement Status.
-        
+
         Deletes legacy access disablement status.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteDisableLegacyAccessResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/disable_legacy_access/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/disable_legacy_access/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteDisableLegacyAccessResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess:
         """Retrieve Legacy Access Disablement Status.
-        
+
         Retrieves legacy access disablement status.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DisableLegacyAccess`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/disable_legacy_access/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/disable_legacy_access/names/default",
+            query=query,
+            headers=headers,
+        )
         return DisableLegacyAccess.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: DisableLegacyAccess,
-               field_mask: str) -> DisableLegacyAccess:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: DisableLegacyAccess,
+        field_mask: str,
+    ) -> DisableLegacyAccess:
         """Update Legacy Access Disablement Status.
-        
+
         Updates legacy access disablement status.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyAccess`
@@ -5251,23 +6168,31 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`DisableLegacyAccess`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/disable_legacy_access/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/disable_legacy_access/names/default",
+            body=body,
+            headers=headers,
+        )
         return DisableLegacyAccess.from_dict(res)
 
 
@@ -5280,59 +6205,69 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse:
         """Delete the disable legacy DBFS setting.
-        
+
         Deletes the disable legacy DBFS setting for a workspace, reverting to the default.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteDisableLegacyDbfsResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/disable_legacy_dbfs/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteDisableLegacyDbfsResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs:
         """Get the disable legacy DBFS setting.
-        
+
         Gets the disable legacy DBFS setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DisableLegacyDbfs`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/disable_legacy_dbfs/names/default",
+            query=query,
+            headers=headers,
+        )
         return DisableLegacyDbfs.from_dict(res)
 
     def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs:
         """Update the disable legacy DBFS setting.
-        
+
         Updates the disable legacy DBFS setting for the workspace.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyDbfs`
@@ -5342,29 +6277,37 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`DisableLegacyDbfs`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/disable_legacy_dbfs/names/default",
+            body=body,
+            headers=headers,
+        )
         return DisableLegacyDbfs.from_dict(res)
 
 
 class DisableLegacyFeaturesAPI:
     """Disable legacy features for new Databricks workspaces.
-    
+
     For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
     provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
     prior to 13.3LTS."""
@@ -5374,62 +6317,74 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse:
         """Delete the disable legacy features setting.
-        
+
         Deletes the disable legacy features setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteDisableLegacyFeaturesResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return DeleteDisableLegacyFeaturesResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures:
         """Get the disable legacy features setting.
-        
+
         Gets the value of the disable legacy features setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DisableLegacyFeatures`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return DisableLegacyFeatures.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
-               field_mask: str) -> DisableLegacyFeatures:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: DisableLegacyFeatures,
+        field_mask: str,
+    ) -> DisableLegacyFeatures:
         """Update the disable legacy features setting.
-        
+
         Updates the value of the disable legacy features setting.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyFeatures`
@@ -5439,24 +6394,31 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`DisableLegacyFeatures`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return DisableLegacyFeatures.from_dict(res)
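
# Account-level settings go through an AccountClient rather than a WorkspaceClient.
# The `a.settings.disable_legacy_features` accessor and the BooleanMessage field
# names are assumptions about the generated models.
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import BooleanMessage, DisableLegacyFeatures

a = AccountClient()
dlf_api = a.settings.disable_legacy_features
dlf_api.update(
    allow_missing=True,
    setting=DisableLegacyFeatures(disable_legacy_features=BooleanMessage(value=True)),  # field names assumed
    field_mask="disable_legacy_features.value",  # assumed path
)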
 
 
@@ -5469,62 +6431,74 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse:
         """Delete the account IP access toggle setting.
-        
+
         Reverts the value of the account IP access toggle setting to its default (ON).
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteAccountIpAccessEnableResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return DeleteAccountIpAccessEnableResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable:
         """Get the account IP access toggle setting.
-        
+
         Gets the value of the account IP access toggle setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`AccountIpAccessEnable`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return AccountIpAccessEnable.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: AccountIpAccessEnable,
-               field_mask: str) -> AccountIpAccessEnable:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: AccountIpAccessEnable,
+        field_mask: str,
+    ) -> AccountIpAccessEnable:
         """Update the account IP access toggle setting.
-        
+
         Updates the value of the account IP access toggle setting.
-        
+
         :param allow_missing: bool
           This should always be set to true for the Settings API. Added for AIP compliance.
         :param setting: :class:`AccountIpAccessEnable`
@@ -5534,24 +6508,31 @@ def update(self, allow_missing: bool, setting: AccountIpAccessEnable,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`AccountIpAccessEnable`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return AccountIpAccessEnable.from_dict(res)
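
# A short sketch for the account IP access toggle: read it, then delete with the
# etag from the read to revert to the default (ON). The accessor name is an
# assumption.
from databricks.sdk import AccountClient

a = AccountClient()
toggle_api = a.settings.enable_ip_access_lists
current = toggle_api.get()
toggle_api.delete(etag=current.etag)  # reverts the toggle to its default (ON)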
 
 
@@ -5559,7 +6540,7 @@ class EnhancedSecurityMonitoringAPI:
     """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
     security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
     compliance security profile is enabled, this is automatically enabled.
-    
+
     If the compliance security profile is disabled, you can enable or disable this setting and it is not
     permanent."""
 
@@ -5568,38 +6549,47 @@ def __init__(self, api_client):
 
     def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting:
         """Get the enhanced security monitoring setting.
-        
+
         Gets the enhanced security monitoring setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default",
+            query=query,
+            headers=headers,
+        )
         return EnhancedSecurityMonitoringSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting,
-               field_mask: str) -> EnhancedSecurityMonitoringSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: EnhancedSecurityMonitoringSetting,
+        field_mask: str,
+    ) -> EnhancedSecurityMonitoringSetting:
         """Update the enhanced security monitoring setting.
-        
+
         Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided
         in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
         request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
         the request must be retried by using the fresh etag in the 409 response.
-        
+
         :param allow_missing: bool
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EnhancedSecurityMonitoringSetting`
@@ -5609,66 +6599,83 @@ def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default",
+            body=body,
+            headers=headers,
+        )
         return EnhancedSecurityMonitoringSetting.from_dict(res)
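
Because update() needs a fresh etag inside the setting and fails with 409 on concurrent modification, callers usually wrap the read-then-PATCH sequence in a small retry loop. A sketch under assumptions: the API hangs off `WorkspaceClient.settings.enhanced_security_monitoring`, HTTP 409 surfaces as `ResourceConflict` from `databricks.sdk.errors`, and the field path in the mask is illustrative:

from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()

for _ in range(3):
    # GET returns the setting together with a fresh etag.
    setting = w.settings.enhanced_security_monitoring.get()
    try:
        w.settings.enhanced_security_monitoring.update(
            allow_missing=True,
            setting=setting,
            field_mask="enhanced_security_monitoring_workspace.is_enabled",  # illustrative
        )
        break
    except ResourceConflict:
        # 409: the setting was updated concurrently; re-read to pick up the new etag.
        continue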
 
 
 class EsmEnablementAccountAPI:
     """The enhanced security monitoring setting at the account level controls whether to enable the feature on
     new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-    creation, account admins can enable enhanced security monitoring individually for each workspace."""
+    creation, account admins can enable enhanced security monitoring individually for each workspace.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get(self, *, etag: Optional[str] = None) -> EsmEnablementAccountSetting:
         """Get the enhanced security monitoring setting for new workspaces.
-        
+
         Gets the enhanced security monitoring setting for new workspaces.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`EsmEnablementAccountSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default',
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return EsmEnablementAccountSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: EsmEnablementAccountSetting,
-               field_mask: str) -> EsmEnablementAccountSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: EsmEnablementAccountSetting,
+        field_mask: str,
+    ) -> EsmEnablementAccountSetting:
         """Update the enhanced security monitoring setting for new workspaces.
-        
+
         Updates the value of the enhanced security monitoring setting for new workspaces.
-        
+
         :param allow_missing: bool
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EsmEnablementAccountSetting`
@@ -5678,147 +6685,179 @@ def update(self, allow_missing: bool, setting: EsmEnablementAccountSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`EsmEnablementAccountSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return EsmEnablementAccountSetting.from_dict(res)
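
At the account level the same get/update shape applies, so checking the default applied to new workspaces is a one-liner (the `esm_enablement_account` accessor name on `AccountClient.settings` is an assumption):

from databricks.sdk import AccountClient

a = AccountClient()

# Read the account-level default for new workspaces.
print(a.settings.esm_enablement_account.get().as_dict())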
 
 
 class IpAccessListsAPI:
     """IP Access List enables admins to configure IP access lists.
-    
+
     IP access lists affect web application access and REST API access to this workspace only. If the feature
     is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists
     (inclusion) and block lists (exclusion).
-    
+
     When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
     matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
     lists**, the IP address is compared with the allow lists.
-    
+
     If there is at least one allow list for the workspace, the connection is allowed only if the IP address
     matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed.
-    
+
     For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values,
     where one CIDR counts as a single value.
-    
-    After changes to the IP access list feature, it can take a few minutes for changes to take effect."""
+
+    After changes to the IP access list feature, it can take a few minutes for changes to take effect.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               label: str,
-               list_type: ListType,
-               *,
-               ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse:
+    def create(
+        self,
+        label: str,
+        list_type: ListType,
+        *,
+        ip_addresses: Optional[List[str]] = None,
+    ) -> CreateIpAccessListResponse:
         """Create access list.
-        
+
         Creates an IP access list for this workspace.
-        
+
         A list can be an allow list or a block list. See the top of this file for a description of how the
         server treats allow lists and block lists at runtime.
-        
+
         When creating or updating an IP access list:
-        
+
         * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
         where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
         `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
         error 400 is returned with `error_code` value `INVALID_STATE`.
-        
+
         It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no
         effect until you enable the feature. See :method:workspaceconf/setStatus
-        
+
         :param label: str
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType`
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
         :param ip_addresses: List[str] (optional)
-        
+
         :returns: :class:`CreateIpAccessListResponse`
         """
         body = {}
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/ip-access-lists', body=body, headers=headers)
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/ip-access-lists", body=body, headers=headers)
         return CreateIpAccessListResponse.from_dict(res)
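
For instance, creating an allow list for an office CIDR range might look like the sketch below; as documented above, the list has no effect until the feature itself is enabled via :method:workspaceconf/setStatus. The accessor name (`w.ip_access_lists`) and the `ListType.ALLOW` member are assumptions consistent with the signatures above:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import ListType

w = WorkspaceClient()

created = w.ip_access_lists.create(
    label="office-egress",
    list_type=ListType.ALLOW,       # or ListType.BLOCK for an exclusion list
    ip_addresses=["192.0.2.0/24"],  # one CIDR counts as a single value toward the 1000 limit
)
print(created)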
 
     def delete(self, ip_access_list_id: str):
         """Delete access list.
-        
+
         Deletes an IP access list, specified by its list ID.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/ip-access-lists/{ip_access_list_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/ip-access-lists/{ip_access_list_id}",
+            headers=headers,
+        )
 
     def get(self, ip_access_list_id: str) -> FetchIpAccessListResponse:
         """Get access list.
-        
+
         Gets an IP access list, specified by its list ID.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
-        
+
         :returns: :class:`FetchIpAccessListResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/ip-access-lists/{ip_access_list_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/ip-access-lists/{ip_access_list_id}",
+            headers=headers,
+        )
         return FetchIpAccessListResponse.from_dict(res)
 
     def list(self) -> Iterator[IpAccessListInfo]:
         """Get access lists.
-        
+
         Gets all IP access lists for the specified workspace.
-        
+
         :returns: Iterator over :class:`IpAccessListInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/ip-access-lists', headers=headers)
+        json = self._api.do("GET", "/api/2.0/ip-access-lists", headers=headers)
         parsed = ListIpAccessListResponse.from_dict(json).ip_access_lists
         return parsed if parsed is not None else []
 
-    def replace(self,
-                ip_access_list_id: str,
-                label: str,
-                list_type: ListType,
-                enabled: bool,
-                *,
-                ip_addresses: Optional[List[str]] = None):
+    def replace(
+        self,
+        ip_access_list_id: str,
+        label: str,
+        list_type: ListType,
+        enabled: bool,
+        *,
+        ip_addresses: Optional[List[str]] = None,
+    ):
         """Replace access list.
-        
+
         Replaces an IP access list, specified by its ID.
-        
+
         A list can include allow lists and block lists. See the top of this file for a description of how the
         server treats allow lists and block lists at run time. When replacing an IP access list: * For all
         allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
@@ -5827,55 +6866,69 @@ def replace(self,
         returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
         effect. Note that your resulting IP access list has no effect until you enable the feature. See
         :method:workspaceconf/setStatus.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
         :param label: str
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType`
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
         :param enabled: bool
           Specifies whether this IP access list is enabled.
         :param ip_addresses: List[str] (optional)
-        
-        
+
+
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PUT', f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body, headers=headers)
-
-    def update(self,
-               ip_access_list_id: str,
-               *,
-               enabled: Optional[bool] = None,
-               ip_addresses: Optional[List[str]] = None,
-               label: Optional[str] = None,
-               list_type: Optional[ListType] = None):
+        if enabled is not None:
+            body["enabled"] = enabled
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/ip-access-lists/{ip_access_list_id}",
+            body=body,
+            headers=headers,
+        )
+
+    def update(
+        self,
+        ip_access_list_id: str,
+        *,
+        enabled: Optional[bool] = None,
+        ip_addresses: Optional[List[str]] = None,
+        label: Optional[str] = None,
+        list_type: Optional[ListType] = None,
+    ):
         """Update access list.
-        
+
         Updates an existing IP access list, specified by its ID.
-        
+
         A list can include allow lists and block lists. See the top of this file for a description of how the
         server treats allow lists and block lists at run time.
-        
+
         When updating an IP access list:
-        
+
         * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
         where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
         `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
         error 400 is returned with `error_code` value `INVALID_STATE`.
-        
+
         It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
         no effect until you enable the feature. See :method:workspaceconf/setStatus.
-        
+
         :param ip_access_list_id: str
           The ID for the corresponding IP access list
         :param enabled: bool (optional)
@@ -5885,20 +6938,32 @@ def update(self,
           Label for the IP access list. This **cannot** be empty.
         :param list_type: :class:`ListType` (optional)
           Type of IP access list. Valid values are as follows and are case-sensitive:
-          
+
           * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
           range. IP addresses in the block list are excluded even if they are included in an allow list.
-        
-        
+
+
         """
         body = {}
-        if enabled is not None: body['enabled'] = enabled
-        if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses]
-        if label is not None: body['label'] = label
-        if list_type is not None: body['list_type'] = list_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if enabled is not None:
+            body["enabled"] = enabled
+        if ip_addresses is not None:
+            body["ip_addresses"] = [v for v in ip_addresses]
+        if label is not None:
+            body["label"] = label
+        if list_type is not None:
+            body["list_type"] = list_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH', f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body, headers=headers)
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/ip-access-lists/{ip_access_list_id}",
+            body=body,
+            headers=headers,
+        )
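
Unlike replace(), update() is a partial PATCH, so disabling an existing list without touching its label or addresses only needs the ID and the changed field. A sketch with a placeholder list ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The ID comes from create() or from iterating w.ip_access_lists.list().
w.ip_access_lists.update(
    ip_access_list_id="0123456789abcdef",  # placeholder
    enabled=False,
)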
 
 
 class NetworkConnectivityAPI:
@@ -5908,10 +6973,9 @@ class NetworkConnectivityAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create_network_connectivity_configuration(self, name: str,
-                                                  region: str) -> NetworkConnectivityConfiguration:
+    def create_network_connectivity_configuration(self, name: str, region: str) -> NetworkConnectivityConfiguration:
         """Create a network connectivity configuration.
-        
+
         :param name: str
           The name of the network connectivity configuration. The name can contain alphanumeric characters,
           hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the
@@ -5919,35 +6983,45 @@ def create_network_connectivity_configuration(self, name: str,
         :param region: str
           The region for the network connectivity configuration. Only workspaces in the same region can be
           attached to the network connectivity configuration.
-        
+
         :returns: :class:`NetworkConnectivityConfiguration`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if region is not None: body['region'] = region
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if name is not None:
+            body["name"] = name
+        if region is not None:
+            body["region"] = region
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs",
+            body=body,
+            headers=headers,
+        )
         return NetworkConnectivityConfiguration.from_dict(res)
 
     def create_private_endpoint_rule(
-            self, network_connectivity_config_id: str, resource_id: str,
-            group_id: CreatePrivateEndpointRuleRequestGroupId) -> NccAzurePrivateEndpointRule:
+        self,
+        network_connectivity_config_id: str,
+        resource_id: str,
+        group_id: CreatePrivateEndpointRuleRequestGroupId,
+    ) -> NccAzurePrivateEndpointRule:
         """Create a private endpoint rule.
-        
+
         Create a private endpoint rule for the specified network connectivity config object. Once the object
         is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure
         resource.
-        
+
         **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to
         complete the connection. To get the information of the private endpoint created, make a `GET` request
         on the new private endpoint rule. See [serverless private link].
-        
+
         [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
         :param resource_id: str
@@ -5955,169 +7029,203 @@ def create_private_endpoint_rule(
         :param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId`
           The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
           storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
-        
+
         :returns: :class:`NccAzurePrivateEndpointRule`
         """
         body = {}
-        if group_id is not None: body['group_id'] = group_id.value
-        if resource_id is not None: body['resource_id'] = resource_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if group_id is not None:
+            body["group_id"] = group_id.value
+        if resource_id is not None:
+            body["resource_id"] = resource_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'POST',
-            f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules',
+            "POST",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return NccAzurePrivateEndpointRule.from_dict(res)
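
End to end, a serverless private link setup creates the configuration first and then one endpoint rule per sub-resource; as noted above, workspace root storage needs both `blob` and `dfs`. A sketch assuming the API is reachable as `AccountClient.network_connectivity`, that the group-ID enum exposes `BLOB` and `DFS` members, and that the returned configuration exposes its ID as `network_connectivity_config_id`; the Azure resource ID is a placeholder:

from databricks.sdk import AccountClient
from databricks.sdk.service.settings import CreatePrivateEndpointRuleRequestGroupId

a = AccountClient()

ncc = a.network_connectivity.create_network_connectivity_configuration(
    name="ncc-westeurope",  # 3-30 chars: alphanumerics, hyphens, underscores
    region="westeurope",
)

storage_account_id = "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<name>"

# Root DBFS needs two endpoints: one for blob and one for dfs.
for group_id in (
    CreatePrivateEndpointRuleRequestGroupId.BLOB,
    CreatePrivateEndpointRuleRequestGroupId.DFS,
):
    rule = a.network_connectivity.create_private_endpoint_rule(
        network_connectivity_config_id=ncc.network_connectivity_config_id,
        resource_id=storage_account_id,
        group_id=group_id,
    )
    print(rule)  # the endpoint still has to be approved on the Azure side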
 
     def delete_network_connectivity_configuration(self, network_connectivity_config_id: str):
         """Delete a network connectivity configuration.
-        
+
         Deletes a network connectivity configuration.
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}',
-            headers=headers)
-
-    def delete_private_endpoint_rule(self, network_connectivity_config_id: str,
-                                     private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule:
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}",
+            headers=headers,
+        )
+
+    def delete_private_endpoint_rule(
+        self,
+        network_connectivity_config_id: str,
+        private_endpoint_rule_id: str,
+    ) -> NccAzurePrivateEndpointRule:
         """Delete a private endpoint rule.
-        
+
         Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
         endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
         after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
         set to `true` and the private endpoint is not available to your serverless compute resources.
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
         :param private_endpoint_rule_id: str
           Your private endpoint rule ID.
-        
+
         :returns: :class:`NccAzurePrivateEndpointRule`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}',
-            headers=headers)
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}",
+            headers=headers,
+        )
         return NccAzurePrivateEndpointRule.from_dict(res)
 
     def get_network_connectivity_configuration(
-            self, network_connectivity_config_id: str) -> NetworkConnectivityConfiguration:
+        self, network_connectivity_config_id: str
+    ) -> NetworkConnectivityConfiguration:
         """Get a network connectivity configuration.
-        
+
         Gets a network connectivity configuration.
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
-        
+
         :returns: :class:`NetworkConnectivityConfiguration`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}",
+            headers=headers,
+        )
         return NetworkConnectivityConfiguration.from_dict(res)
 
-    def get_private_endpoint_rule(self, network_connectivity_config_id: str,
-                                  private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule:
+    def get_private_endpoint_rule(
+        self,
+        network_connectivity_config_id: str,
+        private_endpoint_rule_id: str,
+    ) -> NccAzurePrivateEndpointRule:
         """Get a private endpoint rule.
-        
+
         Gets the private endpoint rule.
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
         :param private_endpoint_rule_id: str
           Your private endpoint rule ID.
-        
+
         :returns: :class:`NccAzurePrivateEndpointRule`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}',
-            headers=headers)
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}",
+            headers=headers,
+        )
         return NccAzurePrivateEndpointRule.from_dict(res)
 
-    def list_network_connectivity_configurations(self,
-                                                 *,
-                                                 page_token: Optional[str] = None
-                                                 ) -> Iterator[NetworkConnectivityConfiguration]:
+    def list_network_connectivity_configurations(
+        self, *, page_token: Optional[str] = None
+    ) -> Iterator[NetworkConnectivityConfiguration]:
         """List network connectivity configurations.
-        
+
         Gets an array of network connectivity configurations.
-        
+
         :param page_token: str (optional)
           Pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`NetworkConnectivityConfiguration`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs',
-                                query=query,
-                                headers=headers)
-            if 'items' in json:
-                for v in json['items']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs",
+                query=query,
+                headers=headers,
+            )
+            if "items" in json:
+                for v in json["items"]:
                     yield NetworkConnectivityConfiguration.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def list_private_endpoint_rules(
-            self,
-            network_connectivity_config_id: str,
-            *,
-            page_token: Optional[str] = None) -> Iterator[NccAzurePrivateEndpointRule]:
+        self,
+        network_connectivity_config_id: str,
+        *,
+        page_token: Optional[str] = None,
+    ) -> Iterator[NccAzurePrivateEndpointRule]:
         """List private endpoint rules.
-        
+
         Gets an array of private endpoint rules.
-        
+
         :param network_connectivity_config_id: str
           Your Network Connectivity Configuration ID.
         :param page_token: str (optional)
           Pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`NccAzurePrivateEndpointRule`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
             json = self._api.do(
-                'GET',
-                f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules',
+                "GET",
+                f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules",
                 query=query,
-                headers=headers)
-            if 'items' in json:
-                for v in json['items']:
+                headers=headers,
+            )
+            if "items" in json:
+                for v in json["items"]:
                     yield NccAzurePrivateEndpointRule.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
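
Both list methods hide the `page_token` loop inside a generator, so callers simply iterate; pagination is transparent. A sketch (accessor and field names assumed as above):

from databricks.sdk import AccountClient

a = AccountClient()

for ncc in a.network_connectivity.list_network_connectivity_configurations():
    rules = a.network_connectivity.list_private_endpoint_rules(
        network_connectivity_config_id=ncc.network_connectivity_config_id
    )
    for rule in rules:
        print(ncc.name, rule)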
 
 
 class NotificationDestinationsAPI:
@@ -6129,111 +7237,154 @@ class NotificationDestinationsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               config: Optional[Config] = None,
-               display_name: Optional[str] = None) -> NotificationDestination:
+    def create(
+        self,
+        *,
+        config: Optional[Config] = None,
+        display_name: Optional[str] = None,
+    ) -> NotificationDestination:
         """Create a notification destination.
-        
+
         Creates a notification destination. Requires workspace admin permissions.
-        
+
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
           The display name for the notification destination.
-        
+
         :returns: :class:`NotificationDestination`
         """
         body = {}
-        if config is not None: body['config'] = config.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if config is not None:
+            body["config"] = config.as_dict()
+        if display_name is not None:
+            body["display_name"] = display_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/notification-destinations', body=body, headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/notification-destinations",
+            body=body,
+            headers=headers,
+        )
         return NotificationDestination.from_dict(res)
 
     def delete(self, id: str):
         """Delete a notification destination.
-        
+
         Deletes a notification destination. Requires workspace admin permissions.
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/notification-destinations/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/notification-destinations/{id}",
+            headers=headers,
+        )
 
     def get(self, id: str) -> NotificationDestination:
         """Get a notification destination.
-        
+
         Gets a notification destination.
-        
+
         :param id: str
-        
+
         :returns: :class:`NotificationDestination`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/notification-destinations/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/notification-destinations/{id}", headers=headers)
         return NotificationDestination.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ListNotificationDestinationsResult]:
         """List notification destinations.
-        
+
         Lists notification destinations.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ListNotificationDestinationsResult`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/notification-destinations', query=query, headers=headers)
-            if 'results' in json:
-                for v in json['results']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/notification-destinations",
+                query=query,
+                headers=headers,
+            )
+            if "results" in json:
+                for v in json["results"]:
                     yield ListNotificationDestinationsResult.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               id: str,
-               *,
-               config: Optional[Config] = None,
-               display_name: Optional[str] = None) -> NotificationDestination:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        id: str,
+        *,
+        config: Optional[Config] = None,
+        display_name: Optional[str] = None,
+    ) -> NotificationDestination:
         """Update a notification destination.
-        
+
         Updates a notification destination. Requires workspace admin permissions. At least one field is
         required in the request body.
-        
+
         :param id: str
           UUID identifying notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
           The display name for the notification destination.
-        
+
         :returns: :class:`NotificationDestination`
         """
         body = {}
-        if config is not None: body['config'] = config.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if config is not None:
+            body["config"] = config.as_dict()
+        if display_name is not None:
+            body["display_name"] = display_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.0/notification-destinations/{id}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/notification-destinations/{id}",
+            body=body,
+            headers=headers,
+        )
         return NotificationDestination.from_dict(res)
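
Listing and renaming a destination only needs the fields shown here; creating one additionally requires a `Config` that wraps exactly one nested config, whose concrete types are not part of this hunk, so only list/update are sketched (the accessor name and the UUID below are assumptions/placeholders):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for dest in w.notification_destinations.list():
    print(dest)

w.notification_destinations.update(
    id="00000000-0000-0000-0000-000000000000",  # placeholder UUID of an existing destination
    display_name="Ops alerts (renamed)",
)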
 
 
@@ -6241,7 +7392,7 @@ class PersonalComputeAPI:
     """The Personal Compute enablement setting lets you control which users can use the Personal Compute default
     policy to create compute resources. By default all users in all workspaces have access (ON), but you can
     change the setting to instead let individual workspaces configure access control (DELEGATE).
-    
+
     There is only one instance of this setting per account. Since this setting has a default value, this
     setting is present on all accounts even though it's never set on a given account. Deletion reverts the
     value of the setting back to the default value."""
@@ -6251,62 +7402,74 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse:
         """Delete Personal Compute setting.
-        
+
         Reverts back the Personal Compute setting value to default (ON)
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeletePersonalComputeSettingResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'DELETE',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default',
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return DeletePersonalComputeSettingResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> PersonalComputeSetting:
         """Get Personal Compute setting.
-        
+
         Gets the value of the Personal Compute setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`PersonalComputeSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default',
+            "GET",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default",
             query=query,
-            headers=headers)
+            headers=headers,
+        )
         return PersonalComputeSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: PersonalComputeSetting,
-               field_mask: str) -> PersonalComputeSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: PersonalComputeSetting,
+        field_mask: str,
+    ) -> PersonalComputeSetting:
         """Update Personal Compute setting.
-        
+
         Updates the value of the Personal Compute setting.
-        
+
         :param allow_missing: bool
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`PersonalComputeSetting`
@@ -6316,24 +7479,31 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`PersonalComputeSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
         res = self._api.do(
-            'PATCH',
-            f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default',
+            "PATCH",
+            f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default",
             body=body,
-            headers=headers)
+            headers=headers,
+        )
         return PersonalComputeSetting.from_dict(res)
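
Since delete() simply reverts the account-level value to its default (ON), switching back to the default and confirming the result is two calls (the `personal_compute` accessor name on `AccountClient.settings` is an assumption):

from databricks.sdk import AccountClient

a = AccountClient()

a.settings.personal_compute.delete()      # revert the setting to its default (ON)
print(a.settings.personal_compute.get())  # confirm the current value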
 
 
@@ -6353,66 +7523,80 @@ def __init__(self, api_client):
 
     def delete(self, *, etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse:
         """Delete the restrict workspace admins setting.
-        
+
         Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be
         provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET`
         request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with
         409 and the request must be retried by using the fresh etag in the 409 response.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('DELETE',
-                           '/api/2.0/settings/types/restrict_workspace_admins/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "DELETE",
+            "/api/2.0/settings/types/restrict_workspace_admins/names/default",
+            query=query,
+            headers=headers,
+        )
         return DeleteRestrictWorkspaceAdminsSettingResponse.from_dict(res)
 
     def get(self, *, etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting:
         """Get the restrict workspace admins setting.
-        
+
         Gets the restrict workspace admins setting.
-        
+
         :param etag: str (optional)
           etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
           optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
           each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
           to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
           request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
+
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         """
 
         query = {}
-        if etag is not None: query['etag'] = etag
-        headers = {'Accept': 'application/json', }
+        if etag is not None:
+            query["etag"] = etag
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/settings/types/restrict_workspace_admins/names/default',
-                           query=query,
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/settings/types/restrict_workspace_admins/names/default",
+            query=query,
+            headers=headers,
+        )
         return RestrictWorkspaceAdminsSetting.from_dict(res)
 
-    def update(self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting,
-               field_mask: str) -> RestrictWorkspaceAdminsSetting:
+    def update(
+        self,
+        allow_missing: bool,
+        setting: RestrictWorkspaceAdminsSetting,
+        field_mask: str,
+    ) -> RestrictWorkspaceAdminsSetting:
         """Update the restrict workspace admins setting.
-        
+
         Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in
         `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request
         before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
         request must be retried by using the fresh etag in the 409 response.
-        
+
         :param allow_missing: bool
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`RestrictWorkspaceAdminsSetting`
@@ -6422,23 +7606,31 @@ def update(self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-        
+
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         """
         body = {}
-        if allow_missing is not None: body['allow_missing'] = allow_missing
-        if field_mask is not None: body['field_mask'] = field_mask
-        if setting is not None: body['setting'] = setting.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if allow_missing is not None:
+            body["allow_missing"] = allow_missing
+        if field_mask is not None:
+            body["field_mask"] = field_mask
+        if setting is not None:
+            body["setting"] = setting.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           '/api/2.0/settings/types/restrict_workspace_admins/names/default',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/settings/types/restrict_workspace_admins/names/default",
+            body=body,
+            headers=headers,
+        )
         return RestrictWorkspaceAdminsSetting.from_dict(res)
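
The read -> delete pattern recommended in the docstrings maps directly onto get() and delete(): take the etag from a GET and pass it to the DELETE so a concurrent change shows up as a 409 rather than being silently overwritten. A sketch, assuming 409 is raised as `ResourceConflict` and that the returned setting exposes an `etag` attribute:

from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()

current = w.settings.restrict_workspace_admins.get()
try:
    w.settings.restrict_workspace_admins.delete(etag=current.etag)
except ResourceConflict:
    # The setting changed since the GET; re-read and retry with the fresh etag.
    fresh = w.settings.restrict_workspace_admins.get()
    w.settings.restrict_workspace_admins.delete(etag=fresh.etag)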
 
 
@@ -6459,12 +7651,16 @@ def __init__(self, api_client):
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
     @property
-    def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI:
+    def aibi_dashboard_embedding_access_policy(
+        self,
+    ) -> AibiDashboardEmbeddingAccessPolicyAPI:
         """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level."""
         return self._aibi_dashboard_embedding_access_policy
 
     @property
-    def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI:
+    def aibi_dashboard_embedding_approved_domains(
+        self,
+    ) -> AibiDashboardEmbeddingApprovedDomainsAPI:
         """Controls the list of domains approved to host the embedded AI/BI dashboards."""
         return self._aibi_dashboard_embedding_approved_domains
 
@@ -6506,164 +7702,222 @@ def restrict_workspace_admins(self) -> RestrictWorkspaceAdminsAPI:
 
 class TokenManagementAPI:
     """Enables administrators to get all tokens and delete tokens for other users. Admins can either get every
-    token, get a specific token by ID, or get all tokens for a particular user."""
+    token, get a specific token by ID, or get all tokens for a particular user.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create_obo_token(self,
-                         application_id: str,
-                         *,
-                         comment: Optional[str] = None,
-                         lifetime_seconds: Optional[int] = None) -> CreateOboTokenResponse:
+    def create_obo_token(
+        self,
+        application_id: str,
+        *,
+        comment: Optional[str] = None,
+        lifetime_seconds: Optional[int] = None,
+    ) -> CreateOboTokenResponse:
         """Create on-behalf token.
-        
+
         Creates a token on behalf of a service principal.
-        
+
         :param application_id: str
           Application ID of the service principal.
         :param comment: str (optional)
           Comment that describes the purpose of the token.
         :param lifetime_seconds: int (optional)
           The number of seconds before the token expires.
-        
+
         :returns: :class:`CreateOboTokenResponse`
         """
         body = {}
-        if application_id is not None: body['application_id'] = application_id
-        if comment is not None: body['comment'] = comment
-        if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if application_id is not None:
+            body["application_id"] = application_id
+        if comment is not None:
+            body["comment"] = comment
+        if lifetime_seconds is not None:
+            body["lifetime_seconds"] = lifetime_seconds
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST',
-                           '/api/2.0/token-management/on-behalf-of/tokens',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "POST",
+            "/api/2.0/token-management/on-behalf-of/tokens",
+            body=body,
+            headers=headers,
+        )
         return CreateOboTokenResponse.from_dict(res)
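
A short sketch of calling this endpoint; the application ID is a placeholder, and it assumes `CreateOboTokenResponse` exposes a `token_value` field as in the current SDK:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Mint a one-hour token on behalf of a service principal.
    obo = w.token_management.create_obo_token(
        application_id="00000000-0000-0000-0000-000000000000",  # placeholder
        comment="automation token",
        lifetime_seconds=3600,
    )
    print(obo.token_value)  # secret value, returned only at creation time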
 
     def delete(self, token_id: str):
         """Delete a token.
-        
+
         Deletes a token, specified by its ID.
-        
+
         :param token_id: str
           The ID of the token to revoke.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/token-management/tokens/{token_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/token-management/tokens/{token_id}",
+            headers=headers,
+        )
 
     def get(self, token_id: str) -> GetTokenResponse:
         """Get token info.
-        
+
         Gets information about a token, specified by its ID.
-        
+
         :param token_id: str
           The ID of the token to get.
-        
+
         :returns: :class:`GetTokenResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/token-management/tokens/{token_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/token-management/tokens/{token_id}",
+            headers=headers,
+        )
         return GetTokenResponse.from_dict(res)
 
     def get_permission_levels(self) -> GetTokenPermissionLevelsResponse:
         """Get token permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :returns: :class:`GetTokenPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           '/api/2.0/permissions/authorization/tokens/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/permissions/authorization/tokens/permissionLevels",
+            headers=headers,
+        )
         return GetTokenPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self) -> TokenPermissions:
         """Get token permissions.
-        
+
         Gets the permissions of all tokens. Tokens can inherit permissions from their root object.
-        
+
         :returns: :class:`TokenPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/permissions/authorization/tokens', headers=headers)
+        res = self._api.do("GET", "/api/2.0/permissions/authorization/tokens", headers=headers)
         return TokenPermissions.from_dict(res)
 
-    def list(self,
-             *,
-             created_by_id: Optional[int] = None,
-             created_by_username: Optional[str] = None) -> Iterator[TokenInfo]:
+    def list(
+        self,
+        *,
+        created_by_id: Optional[int] = None,
+        created_by_username: Optional[str] = None,
+    ) -> Iterator[TokenInfo]:
         """List all tokens.
-        
+
         Lists all tokens associated with the specified workspace or user.
-        
+
         :param created_by_id: int (optional)
           User ID of the user that created the token.
         :param created_by_username: str (optional)
           Username of the user that created the token.
-        
+
         :returns: Iterator over :class:`TokenInfo`
         """
 
         query = {}
-        if created_by_id is not None: query['created_by_id'] = created_by_id
-        if created_by_username is not None: query['created_by_username'] = created_by_username
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET', '/api/2.0/token-management/tokens', query=query, headers=headers)
+        if created_by_id is not None:
+            query["created_by_id"] = created_by_id
+        if created_by_username is not None:
+            query["created_by_username"] = created_by_username
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do(
+            "GET",
+            "/api/2.0/token-management/tokens",
+            query=query,
+            headers=headers,
+        )
         parsed = ListTokensResponse.from_dict(json).token_infos
         return parsed if parsed is not None else []
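
For example, with `w` an authenticated `WorkspaceClient` as in the earlier sketch, tokens created by a particular user (the username below is a placeholder) can be enumerated like this:

    for token in w.token_management.list(created_by_username="someone@example.com"):
        print(token.token_id, token.comment)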
 
     def set_permissions(
-            self,
-            *,
-            access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions:
+        self,
+        *,
+        access_control_list: Optional[List[TokenAccessControlRequest]] = None,
+    ) -> TokenPermissions:
         """Set token permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-        
+
         :returns: :class:`TokenPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', '/api/2.0/permissions/authorization/tokens', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            "/api/2.0/permissions/authorization/tokens",
+            body=body,
+            headers=headers,
+        )
         return TokenPermissions.from_dict(res)
 
     def update_permissions(
-            self,
-            *,
-            access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions:
+        self,
+        *,
+        access_control_list: Optional[List[TokenAccessControlRequest]] = None,
+    ) -> TokenPermissions:
         """Update token permissions.
-        
+
         Updates the permissions on all tokens. Tokens can inherit permissions from their root object.
-        
+
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-        
+
         :returns: :class:`TokenPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', '/api/2.0/permissions/authorization/tokens', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            "/api/2.0/permissions/authorization/tokens",
+            body=body,
+            headers=headers,
+        )
         return TokenPermissions.from_dict(res)
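
A hedged sketch of granting CAN_USE on tokens to a group, assuming the `TokenAccessControlRequest` and `TokenPermissionLevel` types defined elsewhere in this module and with `w` an authenticated `WorkspaceClient`; the group name is a placeholder:

    from databricks.sdk.service.settings import (
        TokenAccessControlRequest,
        TokenPermissionLevel,
    )

    w.token_management.set_permissions(
        access_control_list=[
            TokenAccessControlRequest(
                group_name="data-engineers",  # placeholder group
                permission_level=TokenPermissionLevel.CAN_USE,
            )
        ]
    )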
 
 
@@ -6674,62 +7928,75 @@ class TokensAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               comment: Optional[str] = None,
-               lifetime_seconds: Optional[int] = None) -> CreateTokenResponse:
+    def create(
+        self,
+        *,
+        comment: Optional[str] = None,
+        lifetime_seconds: Optional[int] = None,
+    ) -> CreateTokenResponse:
         """Create a user token.
-        
+
         Creates and returns a token for a user. If this call is made through token authentication, it creates
         a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
         this call returns an error **QUOTA_EXCEEDED**.
-        
+
         :param comment: str (optional)
           Optional description to attach to the token.
         :param lifetime_seconds: int (optional)
           The lifetime of the token, in seconds.
-          
+
           If the lifetime is not specified, this token remains valid indefinitely.
-        
+
         :returns: :class:`CreateTokenResponse`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if comment is not None:
+            body["comment"] = comment
+        if lifetime_seconds is not None:
+            body["lifetime_seconds"] = lifetime_seconds
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/token/create', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/token/create", body=body, headers=headers)
         return CreateTokenResponse.from_dict(res)
 
     def delete(self, token_id: str):
         """Revoke token.
-        
+
         Revokes an access token.
-        
+
         If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.
-        
+
         :param token_id: str
           The ID of the token to be revoked.
-        
-        
+
+
         """
         body = {}
-        if token_id is not None: body['token_id'] = token_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if token_id is not None:
+            body["token_id"] = token_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/token/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/token/delete", body=body, headers=headers)
 
     def list(self) -> Iterator[PublicTokenInfo]:
         """List tokens.
-        
+
         Lists all the valid tokens for a user-workspace pair.
-        
+
         :returns: Iterator over :class:`PublicTokenInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/token/list', headers=headers)
+        json = self._api.do("GET", "/api/2.0/token/list", headers=headers)
         parsed = ListPublicTokensResponse.from_dict(json).token_infos
         return parsed if parsed is not None else []
 
@@ -6742,30 +8009,35 @@ def __init__(self, api_client):
 
     def get_status(self, keys: str) -> WorkspaceConf:
         """Check configuration status.
-        
+
         Gets the configuration status for a workspace.
-        
+
         :param keys: str
-        
+
         :returns: Dict[str,str]
         """
 
         query = {}
-        if keys is not None: query['keys'] = keys
-        headers = {'Accept': 'application/json', }
+        if keys is not None:
+            query["keys"] = keys
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/workspace-conf', query=query, headers=headers)
+        res = self._api.do("GET", "/api/2.0/workspace-conf", query=query, headers=headers)
         return res
 
     def set_status(self, contents: Dict[str, str]):
         """Enable/disable features.
-        
+
         Sets the configuration status for a workspace, including enabling or disabling it.
-        
-        
-        
+
+
+
         """
 
-        headers = {'Content-Type': 'application/json', }
+        headers = {
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH', '/api/2.0/workspace-conf', body=contents, headers=headers)
+        self._api.do("PATCH", "/api/2.0/workspace-conf", body=contents, headers=headers)
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 2015f4ac5..ca62a5a42 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 from databricks.sdk.service import catalog
 
@@ -19,8 +19,8 @@
 class AuthenticationType(Enum):
     """The delta sharing authentication type."""
 
-    DATABRICKS = 'DATABRICKS'
-    TOKEN = 'TOKEN'
+    DATABRICKS = "DATABRICKS"
+    TOKEN = "TOKEN"
 
 
 @dataclass
@@ -41,28 +41,38 @@ class CreateProvider:
     def as_dict(self) -> dict:
         """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type.value
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateProvider into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProvider:
         """Deserializes the CreateProvider from a dictionary."""
-        return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   recipient_profile_str=d.get('recipient_profile_str', None))
+        return cls(
+            authentication_type=_enum(d, "authentication_type", AuthenticationType),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            recipient_profile_str=d.get("recipient_profile_str", None),
+        )
 
 
 @dataclass
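
The `as_dict`/`from_dict` pair above follows the same pattern as the rest of this module: `as_dict` builds the JSON request body (enums serialized by value) and `from_dict` rebuilds the dataclass from a response. A small round-trip sketch, with a placeholder profile string:

    from databricks.sdk.service.sharing import AuthenticationType, CreateProvider

    p = CreateProvider(
        name="example_provider",
        authentication_type=AuthenticationType.TOKEN,
        recipient_profile_str="<delta-sharing-profile-json>",  # placeholder
    )
    body = p.as_dict()  # {'authentication_type': 'TOKEN', 'name': 'example_provider', ...}
    restored = CreateProvider.from_dict(body)  # equivalent dataclass instance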
@@ -102,45 +112,63 @@ class CreateRecipient:
     def as_dict(self) -> dict:
         """Serializes the CreateRecipient into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
-        if self.comment is not None: body['comment'] = self.comment
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type.value
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.data_recipient_global_metastore_id is not None:
-            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
-        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs.as_dict()
+        if self.sharing_code is not None:
+            body["sharing_code"] = self.sharing_code
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
-        if self.comment is not None: body['comment'] = self.comment
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type
+        if self.comment is not None:
+            body["comment"] = self.comment
         if self.data_recipient_global_metastore_id is not None:
-            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
-        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs
+        if self.sharing_code is not None:
+            body["sharing_code"] = self.sharing_code
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRecipient:
         """Deserializes the CreateRecipient from a dictionary."""
-        return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType),
-                   comment=d.get('comment', None),
-                   data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None),
-                   expiration_time=d.get('expiration_time', None),
-                   ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs),
-                   sharing_code=d.get('sharing_code', None))
+        return cls(
+            authentication_type=_enum(d, "authentication_type", AuthenticationType),
+            comment=d.get("comment", None),
+            data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None),
+            expiration_time=d.get("expiration_time", None),
+            ip_access_list=_from_dict(d, "ip_access_list", IpAccessList),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs),
+            sharing_code=d.get("sharing_code", None),
+        )
 
 
 @dataclass
@@ -157,25 +185,33 @@ class CreateShare:
     def as_dict(self) -> dict:
         """Serializes the CreateShare into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateShare into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateShare:
         """Deserializes the CreateShare from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   storage_root=d.get('storage_root', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            storage_root=d.get("storage_root", None),
+        )
 
 
 @dataclass
@@ -228,22 +264,28 @@ class GetRecipientSharePermissionsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.permissions_out:
+            body["permissions_out"] = [v.as_dict() for v in self.permissions_out]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.permissions_out: body['permissions_out'] = self.permissions_out
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.permissions_out:
+            body["permissions_out"] = self.permissions_out
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            permissions_out=_repeated_dict(d, "permissions_out", ShareToPrivilegeAssignment),
+        )
 
 
 @dataclass
@@ -254,19 +296,21 @@ class IpAccessList:
     def as_dict(self) -> dict:
         """Serializes the IpAccessList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses]
+        if self.allowed_ip_addresses:
+            body["allowed_ip_addresses"] = [v for v in self.allowed_ip_addresses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the IpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses
+        if self.allowed_ip_addresses:
+            body["allowed_ip_addresses"] = self.allowed_ip_addresses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IpAccessList:
         """Deserializes the IpAccessList from a dictionary."""
-        return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None))
+        return cls(allowed_ip_addresses=d.get("allowed_ip_addresses", None))
 
 
 @dataclass
@@ -281,22 +325,28 @@ class ListProviderSharesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = [v.as_dict() for v in self.shares]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = self.shares
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = self.shares
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   shares=_repeated_dict(d, 'shares', ProviderShare))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            shares=_repeated_dict(d, "shares", ProviderShare),
+        )
 
 
 @dataclass
@@ -311,22 +361,28 @@ class ListProvidersResponse:
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = [v.as_dict() for v in self.providers]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = self.providers
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = self.providers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   providers=_repeated_dict(d, 'providers', ProviderInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            providers=_repeated_dict(d, "providers", ProviderInfo),
+        )
 
 
 @dataclass
@@ -341,22 +397,28 @@ class ListRecipientsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.recipients:
+            body["recipients"] = [v.as_dict() for v in self.recipients]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.recipients: body['recipients'] = self.recipients
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.recipients:
+            body["recipients"] = self.recipients
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   recipients=_repeated_dict(d, 'recipients', RecipientInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            recipients=_repeated_dict(d, "recipients", RecipientInfo),
+        )
 
 
 @dataclass
@@ -371,22 +433,28 @@ class ListSharesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = [v.as_dict() for v in self.shares]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = self.shares
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = self.shares
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   shares=_repeated_dict(d, 'shares', ShareInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            shares=_repeated_dict(d, "shares", ShareInfo),
+        )
 
 
 @dataclass
@@ -397,19 +465,21 @@ class Partition:
     def as_dict(self) -> dict:
         """Serializes the Partition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        if self.values:
+            body["values"] = [v.as_dict() for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Partition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values: body['values'] = self.values
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Partition:
         """Deserializes the Partition from a dictionary."""
-        return cls(values=_repeated_dict(d, 'values', PartitionValue))
+        return cls(values=_repeated_dict(d, "values", PartitionValue))
 
 
 @dataclass
@@ -420,19 +490,21 @@ class PartitionSpecificationPartition:
     def as_dict(self) -> dict:
         """Serializes the PartitionSpecificationPartition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        if self.values:
+            body["values"] = [v.as_dict() for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PartitionSpecificationPartition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values: body['values'] = self.values
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionSpecificationPartition:
         """Deserializes the PartitionSpecificationPartition from a dictionary."""
-        return cls(values=_repeated_dict(d, 'values', PartitionValue))
+        return cls(values=_repeated_dict(d, "values", PartitionValue))
 
 
 @dataclass
@@ -454,85 +526,93 @@ class PartitionValue:
     def as_dict(self) -> dict:
         """Serializes the PartitionValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.op is not None: body['op'] = self.op.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.op is not None:
+            body["op"] = self.op.value
         if self.recipient_property_key is not None:
-            body['recipient_property_key'] = self.recipient_property_key
-        if self.value is not None: body['value'] = self.value
+            body["recipient_property_key"] = self.recipient_property_key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PartitionValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.op is not None: body['op'] = self.op
+        if self.name is not None:
+            body["name"] = self.name
+        if self.op is not None:
+            body["op"] = self.op
         if self.recipient_property_key is not None:
-            body['recipient_property_key'] = self.recipient_property_key
-        if self.value is not None: body['value'] = self.value
+            body["recipient_property_key"] = self.recipient_property_key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionValue:
         """Deserializes the PartitionValue from a dictionary."""
-        return cls(name=d.get('name', None),
-                   op=_enum(d, 'op', PartitionValueOp),
-                   recipient_property_key=d.get('recipient_property_key', None),
-                   value=d.get('value', None))
+        return cls(
+            name=d.get("name", None),
+            op=_enum(d, "op", PartitionValueOp),
+            recipient_property_key=d.get("recipient_property_key", None),
+            value=d.get("value", None),
+        )
 
 
 class PartitionValueOp(Enum):
 
-    EQUAL = 'EQUAL'
-    LIKE = 'LIKE'
+    EQUAL = "EQUAL"
+    LIKE = "LIKE"
 
 
 class Privilege(Enum):
 
-    ACCESS = 'ACCESS'
-    ALL_PRIVILEGES = 'ALL_PRIVILEGES'
-    APPLY_TAG = 'APPLY_TAG'
-    CREATE = 'CREATE'
-    CREATE_CATALOG = 'CREATE_CATALOG'
-    CREATE_CONNECTION = 'CREATE_CONNECTION'
-    CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION'
-    CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
-    CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
-    CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
-    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
-    CREATE_FUNCTION = 'CREATE_FUNCTION'
-    CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
-    CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
-    CREATE_MODEL = 'CREATE_MODEL'
-    CREATE_PROVIDER = 'CREATE_PROVIDER'
-    CREATE_RECIPIENT = 'CREATE_RECIPIENT'
-    CREATE_SCHEMA = 'CREATE_SCHEMA'
-    CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL'
-    CREATE_SHARE = 'CREATE_SHARE'
-    CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL'
-    CREATE_TABLE = 'CREATE_TABLE'
-    CREATE_VIEW = 'CREATE_VIEW'
-    CREATE_VOLUME = 'CREATE_VOLUME'
-    EXECUTE = 'EXECUTE'
-    MANAGE = 'MANAGE'
-    MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
-    MODIFY = 'MODIFY'
-    READ_FILES = 'READ_FILES'
-    READ_PRIVATE_FILES = 'READ_PRIVATE_FILES'
-    READ_VOLUME = 'READ_VOLUME'
-    REFRESH = 'REFRESH'
-    SELECT = 'SELECT'
-    SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
-    USAGE = 'USAGE'
-    USE_CATALOG = 'USE_CATALOG'
-    USE_CONNECTION = 'USE_CONNECTION'
-    USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS'
-    USE_PROVIDER = 'USE_PROVIDER'
-    USE_RECIPIENT = 'USE_RECIPIENT'
-    USE_SCHEMA = 'USE_SCHEMA'
-    USE_SHARE = 'USE_SHARE'
-    WRITE_FILES = 'WRITE_FILES'
-    WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES'
-    WRITE_VOLUME = 'WRITE_VOLUME'
+    ACCESS = "ACCESS"
+    ALL_PRIVILEGES = "ALL_PRIVILEGES"
+    APPLY_TAG = "APPLY_TAG"
+    CREATE = "CREATE"
+    CREATE_CATALOG = "CREATE_CATALOG"
+    CREATE_CONNECTION = "CREATE_CONNECTION"
+    CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
+    CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
+    CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME"
+    CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG"
+    CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE"
+    CREATE_FUNCTION = "CREATE_FUNCTION"
+    CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE"
+    CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW"
+    CREATE_MODEL = "CREATE_MODEL"
+    CREATE_PROVIDER = "CREATE_PROVIDER"
+    CREATE_RECIPIENT = "CREATE_RECIPIENT"
+    CREATE_SCHEMA = "CREATE_SCHEMA"
+    CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL"
+    CREATE_SHARE = "CREATE_SHARE"
+    CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL"
+    CREATE_TABLE = "CREATE_TABLE"
+    CREATE_VIEW = "CREATE_VIEW"
+    CREATE_VOLUME = "CREATE_VOLUME"
+    EXECUTE = "EXECUTE"
+    MANAGE = "MANAGE"
+    MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
+    MODIFY = "MODIFY"
+    READ_FILES = "READ_FILES"
+    READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
+    READ_VOLUME = "READ_VOLUME"
+    REFRESH = "REFRESH"
+    SELECT = "SELECT"
+    SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION"
+    USAGE = "USAGE"
+    USE_CATALOG = "USE_CATALOG"
+    USE_CONNECTION = "USE_CONNECTION"
+    USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS"
+    USE_PROVIDER = "USE_PROVIDER"
+    USE_RECIPIENT = "USE_RECIPIENT"
+    USE_SCHEMA = "USE_SCHEMA"
+    USE_SHARE = "USE_SHARE"
+    WRITE_FILES = "WRITE_FILES"
+    WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES"
+    WRITE_VOLUME = "WRITE_VOLUME"
 
 
 @dataclass
@@ -546,21 +626,28 @@ class PrivilegeAssignment:
     def as_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = [v.value for v in self.privileges]
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = [v.value for v in self.privileges]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.privileges: body['privileges'] = self.privileges
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.privileges:
+            body["privileges"] = self.privileges
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
-        return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege))
+        return cls(
+            principal=d.get("principal", None),
+            privileges=_repeated_enum(d, "privileges", Privilege),
+        )
 
 
 @dataclass
@@ -617,60 +704,88 @@ class ProviderInfo:
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type.value
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_provider_global_metastore_id is not None:
-            body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile.as_dict()
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
-        if self.region is not None: body['region'] = self.region
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile:
+            body["recipient_profile"] = self.recipient_profile.as_dict()
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_provider_global_metastore_id is not None:
-            body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
-        if self.region is not None: body['region'] = self.region
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile:
+            body["recipient_profile"] = self.recipient_profile
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
-        return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType),
-                   cloud=d.get('cloud', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   data_provider_global_metastore_id=d.get('data_provider_global_metastore_id', None),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   recipient_profile=_from_dict(d, 'recipient_profile', RecipientProfile),
-                   recipient_profile_str=d.get('recipient_profile_str', None),
-                   region=d.get('region', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            authentication_type=_enum(d, "authentication_type", AuthenticationType),
+            cloud=d.get("cloud", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_provider_global_metastore_id=d.get("data_provider_global_metastore_id", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            recipient_profile=_from_dict(d, "recipient_profile", RecipientProfile),
+            recipient_profile_str=d.get("recipient_profile_str", None),
+            region=d.get("region", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -681,19 +796,21 @@ class ProviderShare:
     def as_dict(self) -> dict:
         """Serializes the ProviderShare into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderShare into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderShare:
         """Deserializes the ProviderShare from a dictionary."""
-        return cls(name=d.get('name', None))
+        return cls(name=d.get("name", None))
 
 
 @dataclass
@@ -767,75 +884,113 @@ class RecipientInfo:
     def as_dict(self) -> dict:
         """Serializes the RecipientInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activated is not None: body['activated'] = self.activated
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.activated is not None:
+            body["activated"] = self.activated
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type.value
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_recipient_global_metastore_id is not None:
-            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
-        if self.region is not None: body['region'] = self.region
-        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
-        if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
+        if self.sharing_code is not None:
+            body["sharing_code"] = self.sharing_code
+        if self.tokens:
+            body["tokens"] = [v.as_dict() for v in self.tokens]
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RecipientInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activated is not None: body['activated'] = self.activated
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.activated is not None:
+            body["activated"] = self.activated
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
         if self.data_recipient_global_metastore_id is not None:
-            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
-        if self.region is not None: body['region'] = self.region
-        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
-        if self.tokens: body['tokens'] = self.tokens
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs
+        if self.region is not None:
+            body["region"] = self.region
+        if self.sharing_code is not None:
+            body["sharing_code"] = self.sharing_code
+        if self.tokens:
+            body["tokens"] = self.tokens
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientInfo:
         """Deserializes the RecipientInfo from a dictionary."""
-        return cls(activated=d.get('activated', None),
-                   activation_url=d.get('activation_url', None),
-                   authentication_type=_enum(d, 'authentication_type', AuthenticationType),
-                   cloud=d.get('cloud', None),
-                   comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None),
-                   expiration_time=d.get('expiration_time', None),
-                   ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
-                   metastore_id=d.get('metastore_id', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs),
-                   region=d.get('region', None),
-                   sharing_code=d.get('sharing_code', None),
-                   tokens=_repeated_dict(d, 'tokens', RecipientTokenInfo),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            activated=d.get("activated", None),
+            activation_url=d.get("activation_url", None),
+            authentication_type=_enum(d, "authentication_type", AuthenticationType),
+            cloud=d.get("cloud", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None),
+            expiration_time=d.get("expiration_time", None),
+            ip_access_list=_from_dict(d, "ip_access_list", IpAccessList),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs),
+            region=d.get("region", None),
+            sharing_code=d.get("sharing_code", None),
+            tokens=_repeated_dict(d, "tokens", RecipientTokenInfo),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
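
# A minimal sketch of the pattern the generated classes in this module follow: from_dict
# rebuilds nested objects (here the `tokens` entries become RecipientTokenInfo via
# _repeated_dict) and as_dict flattens them back to JSON-ready values. Field values are
# placeholders.
from databricks.sdk.service.sharing import RecipientInfo

info = RecipientInfo.from_dict(
    {
        "name": "partner-recipient",
        "comment": "external analytics partner",
        "tokens": [{"id": "tok-123", "expiration_time": 1735689600000}],
    }
)
assert info.tokens[0].id == "tok-123"  # nested dicts were turned into objects
assert info.as_dict()["tokens"][0]["id"] == "tok-123"  # and back into plain dicts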
 
 
 @dataclass
@@ -852,27 +1007,33 @@ class RecipientProfile:
     def as_dict(self) -> dict:
         """Serializes the RecipientProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.bearer_token is not None:
+            body["bearer_token"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
         if self.share_credentials_version is not None:
-            body['share_credentials_version'] = self.share_credentials_version
+            body["share_credentials_version"] = self.share_credentials_version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.bearer_token is not None:
+            body["bearer_token"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
         if self.share_credentials_version is not None:
-            body['share_credentials_version'] = self.share_credentials_version
+            body["share_credentials_version"] = self.share_credentials_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientProfile:
         """Deserializes the RecipientProfile from a dictionary."""
-        return cls(bearer_token=d.get('bearer_token', None),
-                   endpoint=d.get('endpoint', None),
-                   share_credentials_version=d.get('share_credentials_version', None))
+        return cls(
+            bearer_token=d.get("bearer_token", None),
+            endpoint=d.get("endpoint", None),
+            share_credentials_version=d.get("share_credentials_version", None),
+        )
 
 
 @dataclass
@@ -902,37 +1063,53 @@ class RecipientTokenInfo:
     def as_dict(self) -> dict:
         """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientTokenInfo:
         """Deserializes the RecipientTokenInfo from a dictionary."""
-        return cls(activation_url=d.get('activation_url', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   expiration_time=d.get('expiration_time', None),
-                   id=d.get('id', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            activation_url=d.get("activation_url", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            expiration_time=d.get("expiration_time", None),
+            id=d.get("id", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -952,30 +1129,38 @@ class RetrieveTokenResponse:
     def as_dict(self) -> dict:
         """Serializes the RetrieveTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
         if self.share_credentials_version is not None:
-            body['shareCredentialsVersion'] = self.share_credentials_version
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
         if self.share_credentials_version is not None:
-            body['shareCredentialsVersion'] = self.share_credentials_version
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RetrieveTokenResponse:
         """Deserializes the RetrieveTokenResponse from a dictionary."""
-        return cls(bearer_token=d.get('bearerToken', None),
-                   endpoint=d.get('endpoint', None),
-                   expiration_time=d.get('expirationTime', None),
-                   share_credentials_version=d.get('shareCredentialsVersion', None))
+        return cls(
+            bearer_token=d.get("bearerToken", None),
+            endpoint=d.get("endpoint", None),
+            expiration_time=d.get("expirationTime", None),
+            share_credentials_version=d.get("shareCredentialsVersion", None),
+        )
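
# Sketch: RetrieveTokenResponse keeps camelCase wire names ("bearerToken",
# "expirationTime", "shareCredentialsVersion") while exposing snake_case attributes,
# unlike RecipientProfile above. Values are placeholders.
from databricks.sdk.service.sharing import RetrieveTokenResponse

resp = RetrieveTokenResponse.from_dict(
    {"bearerToken": "fake-token", "endpoint": "https://sharing.example.com", "shareCredentialsVersion": 1}
)
assert resp.bearer_token == "fake-token"
assert "bearerToken" in resp.as_dict()  # serialization restores the camelCase keys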
 
 
 @dataclass
@@ -992,23 +1177,27 @@ def as_dict(self) -> dict:
         """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.existing_token_expire_in_seconds is not None:
-            body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds
-        if self.name is not None: body['name'] = self.name
+            body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.existing_token_expire_in_seconds is not None:
-            body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds
-        if self.name is not None: body['name'] = self.name
+            body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RotateRecipientToken:
         """Deserializes the RotateRecipientToken from a dictionary."""
-        return cls(existing_token_expire_in_seconds=d.get('existing_token_expire_in_seconds', None),
-                   name=d.get('name', None))
+        return cls(
+            existing_token_expire_in_seconds=d.get("existing_token_expire_in_seconds", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1021,19 +1210,21 @@ class SecurablePropertiesKvPairs:
     def as_dict(self) -> dict:
         """Serializes the SecurablePropertiesKvPairs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.properties: body['properties'] = self.properties
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.properties: body['properties'] = self.properties
+        if self.properties:
+            body["properties"] = self.properties
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecurablePropertiesKvPairs:
         """Deserializes the SecurablePropertiesKvPairs from a dictionary."""
-        return cls(properties=d.get('properties', None))
+        return cls(properties=d.get("properties", None))
 
 
 @dataclass
@@ -1071,46 +1262,68 @@ class ShareInfo:
     def as_dict(self) -> dict:
         """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.name is not None: body['name'] = self.name
-        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
-        if self.owner is not None: body['owner'] = self.owner
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.name is not None:
+            body["name"] = self.name
+        if self.objects:
+            body["objects"] = [v.as_dict() for v in self.objects]
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.name is not None: body['name'] = self.name
-        if self.objects: body['objects'] = self.objects
-        if self.owner is not None: body['owner'] = self.owner
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.name is not None:
+            body["name"] = self.name
+        if self.objects:
+            body["objects"] = self.objects
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   name=d.get('name', None),
-                   objects=_repeated_dict(d, 'objects', SharedDataObject),
-                   owner=d.get('owner', None),
-                   storage_location=d.get('storage_location', None),
-                   storage_root=d.get('storage_root', None),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+        return cls(
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            name=d.get("name", None),
+            objects=_repeated_dict(d, "objects", SharedDataObject),
+            owner=d.get("owner", None),
+            storage_location=d.get("storage_location", None),
+            storage_root=d.get("storage_root", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
 
 
 @dataclass
@@ -1125,22 +1338,27 @@ def as_dict(self) -> dict:
         """Serializes the ShareToPrivilegeAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.privilege_assignments:
-            body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
-        if self.share_name is not None: body['share_name'] = self.share_name
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
-        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareToPrivilegeAssignment:
         """Deserializes the ShareToPrivilegeAssignment from a dictionary."""
-        return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment),
-                   share_name=d.get('share_name', None))
+        return cls(
+            privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment),
+            share_name=d.get("share_name", None),
+        )
 
 
 @dataclass
@@ -1202,87 +1420,116 @@ class SharedDataObject:
     def as_dict(self) -> dict:
         """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.added_by is not None: body['added_by'] = self.added_by
-        if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
-        if self.comment is not None: body['comment'] = self.comment
-        if self.content is not None: body['content'] = self.content
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type.value
+        if self.added_at is not None:
+            body["added_at"] = self.added_at
+        if self.added_by is not None:
+            body["added_by"] = self.added_by
+        if self.cdf_enabled is not None:
+            body["cdf_enabled"] = self.cdf_enabled
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.content is not None:
+            body["content"] = self.content
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type.value
         if self.history_data_sharing_status is not None:
-            body['history_data_sharing_status'] = self.history_data_sharing_status.value
-        if self.name is not None: body['name'] = self.name
-        if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions]
-        if self.shared_as is not None: body['shared_as'] = self.shared_as
-        if self.start_version is not None: body['start_version'] = self.start_version
-        if self.status is not None: body['status'] = self.status.value
-        if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
+            body["history_data_sharing_status"] = self.history_data_sharing_status.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.partitions:
+            body["partitions"] = [v.as_dict() for v in self.partitions]
+        if self.shared_as is not None:
+            body["shared_as"] = self.shared_as
+        if self.start_version is not None:
+            body["start_version"] = self.start_version
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.string_shared_as is not None:
+            body["string_shared_as"] = self.string_shared_as
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.added_by is not None: body['added_by'] = self.added_by
-        if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
-        if self.comment is not None: body['comment'] = self.comment
-        if self.content is not None: body['content'] = self.content
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.added_at is not None:
+            body["added_at"] = self.added_at
+        if self.added_by is not None:
+            body["added_by"] = self.added_by
+        if self.cdf_enabled is not None:
+            body["cdf_enabled"] = self.cdf_enabled
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.content is not None:
+            body["content"] = self.content
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type
         if self.history_data_sharing_status is not None:
-            body['history_data_sharing_status'] = self.history_data_sharing_status
-        if self.name is not None: body['name'] = self.name
-        if self.partitions: body['partitions'] = self.partitions
-        if self.shared_as is not None: body['shared_as'] = self.shared_as
-        if self.start_version is not None: body['start_version'] = self.start_version
-        if self.status is not None: body['status'] = self.status
-        if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
+            body["history_data_sharing_status"] = self.history_data_sharing_status
+        if self.name is not None:
+            body["name"] = self.name
+        if self.partitions:
+            body["partitions"] = self.partitions
+        if self.shared_as is not None:
+            body["shared_as"] = self.shared_as
+        if self.start_version is not None:
+            body["start_version"] = self.start_version
+        if self.status is not None:
+            body["status"] = self.status
+        if self.string_shared_as is not None:
+            body["string_shared_as"] = self.string_shared_as
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
-        return cls(added_at=d.get('added_at', None),
-                   added_by=d.get('added_by', None),
-                   cdf_enabled=d.get('cdf_enabled', None),
-                   comment=d.get('comment', None),
-                   content=d.get('content', None),
-                   data_object_type=_enum(d, 'data_object_type', SharedDataObjectDataObjectType),
-                   history_data_sharing_status=_enum(d, 'history_data_sharing_status',
-                                                     SharedDataObjectHistoryDataSharingStatus),
-                   name=d.get('name', None),
-                   partitions=_repeated_dict(d, 'partitions', Partition),
-                   shared_as=d.get('shared_as', None),
-                   start_version=d.get('start_version', None),
-                   status=_enum(d, 'status', SharedDataObjectStatus),
-                   string_shared_as=d.get('string_shared_as', None))
+        return cls(
+            added_at=d.get("added_at", None),
+            added_by=d.get("added_by", None),
+            cdf_enabled=d.get("cdf_enabled", None),
+            comment=d.get("comment", None),
+            content=d.get("content", None),
+            data_object_type=_enum(d, "data_object_type", SharedDataObjectDataObjectType),
+            history_data_sharing_status=_enum(
+                d,
+                "history_data_sharing_status",
+                SharedDataObjectHistoryDataSharingStatus,
+            ),
+            name=d.get("name", None),
+            partitions=_repeated_dict(d, "partitions", Partition),
+            shared_as=d.get("shared_as", None),
+            start_version=d.get("start_version", None),
+            status=_enum(d, "status", SharedDataObjectStatus),
+            string_shared_as=d.get("string_shared_as", None),
+        )
 
 
 class SharedDataObjectDataObjectType(Enum):
     """The type of the data object."""
 
-    FEATURE_SPEC = 'FEATURE_SPEC'
-    FUNCTION = 'FUNCTION'
-    MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
-    MODEL = 'MODEL'
-    NOTEBOOK_FILE = 'NOTEBOOK_FILE'
-    SCHEMA = 'SCHEMA'
-    STREAMING_TABLE = 'STREAMING_TABLE'
-    TABLE = 'TABLE'
-    VIEW = 'VIEW'
+    FEATURE_SPEC = "FEATURE_SPEC"
+    FUNCTION = "FUNCTION"
+    MATERIALIZED_VIEW = "MATERIALIZED_VIEW"
+    MODEL = "MODEL"
+    NOTEBOOK_FILE = "NOTEBOOK_FILE"
+    SCHEMA = "SCHEMA"
+    STREAMING_TABLE = "STREAMING_TABLE"
+    TABLE = "TABLE"
+    VIEW = "VIEW"
 
 
 class SharedDataObjectHistoryDataSharingStatus(Enum):
     """Whether to enable or disable sharing of data history. If not specified, the default is
     **DISABLED**."""
 
-    DISABLED = 'DISABLED'
-    ENABLED = 'ENABLED'
+    DISABLED = "DISABLED"
+    ENABLED = "ENABLED"
 
 
 class SharedDataObjectStatus(Enum):
     """One of: **ACTIVE**, **PERMISSION_DENIED**."""
 
-    ACTIVE = 'ACTIVE'
-    PERMISSION_DENIED = 'PERMISSION_DENIED'
+    ACTIVE = "ACTIVE"
+    PERMISSION_DENIED = "PERMISSION_DENIED"
 
 
 @dataclass
@@ -1296,30 +1543,36 @@ class SharedDataObjectUpdate:
     def as_dict(self) -> dict:
         """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.action is not None: body['action'] = self.action.value
-        if self.data_object: body['data_object'] = self.data_object.as_dict()
+        if self.action is not None:
+            body["action"] = self.action.value
+        if self.data_object:
+            body["data_object"] = self.data_object.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.action is not None: body['action'] = self.action
-        if self.data_object: body['data_object'] = self.data_object
+        if self.action is not None:
+            body["action"] = self.action
+        if self.data_object:
+            body["data_object"] = self.data_object
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObjectUpdate:
         """Deserializes the SharedDataObjectUpdate from a dictionary."""
-        return cls(action=_enum(d, 'action', SharedDataObjectUpdateAction),
-                   data_object=_from_dict(d, 'data_object', SharedDataObject))
+        return cls(
+            action=_enum(d, "action", SharedDataObjectUpdateAction),
+            data_object=_from_dict(d, "data_object", SharedDataObject),
+        )
 
 
 class SharedDataObjectUpdateAction(Enum):
     """One of: **ADD**, **REMOVE**, **UPDATE**."""
 
-    ADD = 'ADD'
-    REMOVE = 'REMOVE'
-    UPDATE = 'UPDATE'
+    ADD = "ADD"
+    REMOVE = "REMOVE"
+    UPDATE = "UPDATE"
 
 
 @dataclass
@@ -1362,31 +1615,43 @@ class UpdateProvider:
     def as_dict(self) -> dict:
         """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProvider:
         """Deserializes the UpdateProvider from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   recipient_profile_str=d.get('recipient_profile_str', None))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            recipient_profile_str=d.get("recipient_profile_str", None),
+        )
 
 
 @dataclass
@@ -1417,37 +1682,53 @@ class UpdateRecipient:
     def as_dict(self) -> dict:
         """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.ip_access_list:
+            body["ip_access_list"] = self.ip_access_list
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties_kvpairs:
+            body["properties_kvpairs"] = self.properties_kvpairs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
         """Deserializes the UpdateRecipient from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   expiration_time=d.get('expiration_time', None),
-                   ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs))
+        return cls(
+            comment=d.get("comment", None),
+            expiration_time=d.get("expiration_time", None),
+            ip_access_list=_from_dict(d, "ip_access_list", IpAccessList),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs),
+        )
 
 
 @dataclass
@@ -1473,34 +1754,48 @@ class UpdateShare:
     def as_dict(self) -> dict:
         """Serializes the UpdateShare into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updates:
+            body["updates"] = [v.as_dict() for v in self.updates]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateShare into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updates: body['updates'] = self.updates
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updates:
+            body["updates"] = self.updates
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateShare:
         """Deserializes the UpdateShare from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   new_name=d.get('new_name', None),
-                   owner=d.get('owner', None),
-                   storage_root=d.get('storage_root', None),
-                   updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate))
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            owner=d.get("owner", None),
+            storage_root=d.get("storage_root", None),
+            updates=_repeated_dict(d, "updates", SharedDataObjectUpdate),
+        )
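
# Building on the SharedDataObjectUpdate sketch above: UpdateShare is just the request
# wrapper, and as_dict() turns its `updates` list into the nested JSON payload. Share and
# table names are placeholders.
from databricks.sdk.service.sharing import (
    SharedDataObject,
    SharedDataObjectUpdate,
    SharedDataObjectUpdateAction,
    UpdateShare,
)

req = UpdateShare(
    name="quarterly-sales",
    updates=[
        SharedDataObjectUpdate(
            action=SharedDataObjectUpdateAction.ADD,
            data_object=SharedDataObject(name="main.default.orders"),
        )
    ],
)
assert req.as_dict()["updates"][0]["data_object"]["name"] == "main.default.orders"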
 
 
 @dataclass
@@ -1526,28 +1821,38 @@ class UpdateSharePermissions:
     def as_dict(self) -> dict:
         """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.changes: body['changes'] = [v.as_dict() for v in self.changes]
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.name is not None: body['name'] = self.name
-        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.changes:
+            body["changes"] = [v.as_dict() for v in self.changes]
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.name is not None:
+            body["name"] = self.name
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.changes: body['changes'] = self.changes
-        if self.max_results is not None: body['max_results'] = self.max_results
-        if self.name is not None: body['name'] = self.name
-        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.changes:
+            body["changes"] = self.changes
+        if self.max_results is not None:
+            body["max_results"] = self.max_results
+        if self.name is not None:
+            body["name"] = self.name
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
         """Deserializes the UpdateSharePermissions from a dictionary."""
-        return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange),
-                   max_results=d.get('max_results', None),
-                   name=d.get('name', None),
-                   page_token=d.get('page_token', None))
+        return cls(
+            changes=_repeated_dict(d, "changes", catalog.PermissionsChange),
+            max_results=d.get("max_results", None),
+            name=d.get("name", None),
+            page_token=d.get("page_token", None),
+        )
 
 
 class ProvidersAPI:
@@ -1557,17 +1862,19 @@ class ProvidersAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               authentication_type: AuthenticationType,
-               *,
-               comment: Optional[str] = None,
-               recipient_profile_str: Optional[str] = None) -> ProviderInfo:
+    def create(
+        self,
+        name: str,
+        authentication_type: AuthenticationType,
+        *,
+        comment: Optional[str] = None,
+        recipient_profile_str: Optional[str] = None,
+    ) -> ProviderInfo:
         """Create an auth provider.
-        
+
         Creates a new authentication provider minimally based on a name and authentication type. The caller
         must be an admin on the metastore.
-        
+
         :param name: str
           The name of the Provider.
         :param authentication_type: :class:`AuthenticationType`
@@ -1577,63 +1884,85 @@ def create(self,
         :param recipient_profile_str: str (optional)
           This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
           or not provided.
-        
+
         :returns: :class:`ProviderInfo`
         """
         body = {}
-        if authentication_type is not None: body['authentication_type'] = authentication_type.value
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/providers', body=body, headers=headers)
+        if authentication_type is not None:
+            body["authentication_type"] = authentication_type.value
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if recipient_profile_str is not None:
+            body["recipient_profile_str"] = recipient_profile_str
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/providers",
+            body=body,
+            headers=headers,
+        )
         return ProviderInfo.from_dict(res)
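
# Usage sketch (not definitive): assumes the standard WorkspaceClient entry point exposes
# this class as `w.providers`, that AuthenticationType has a TOKEN member as the
# docstrings above indicate, and that the profile string mirrors the credential-file keys
# shown in RetrieveTokenResponse. All values are placeholders.
import json

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import AuthenticationType

w = WorkspaceClient()
provider = w.providers.create(
    name="example-provider",
    authentication_type=AuthenticationType.TOKEN,
    comment="created from the SDK",
    recipient_profile_str=json.dumps(
        {"shareCredentialsVersion": 1, "endpoint": "https://sharing.example.com", "bearerToken": "..."}
    ),
)
print(w.providers.get(provider.name).comment)  # get() returns the same ProviderInfo shape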
 
     def delete(self, name: str):
         """Delete a provider.
-        
+
         Deletes an authentication provider, if the caller is a metastore admin or is the owner of the
         provider.
-        
+
         :param name: str
           Name of the provider.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/providers/{name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/providers/{name}",
+            headers=headers,
+        )
 
     def get(self, name: str) -> ProviderInfo:
         """Get a provider.
-        
+
         Gets a specific authentication provider. The caller must supply the name of the provider, and must
         either be a metastore admin or the owner of the provider.
-        
+
         :param name: str
           Name of the provider.
-        
+
         :returns: :class:`ProviderInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.1/unity-catalog/providers/{name}", headers=headers)
         return ProviderInfo.from_dict(res)
 
-    def list(self,
-             *,
-             data_provider_global_metastore_id: Optional[str] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ProviderInfo]:
+    def list(
+        self,
+        *,
+        data_provider_global_metastore_id: Optional[str] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ProviderInfo]:
         """List providers.
-        
+
         Gets an array of available authentication providers. The caller must either be a metastore admin or
         the owner of the providers. Providers not owned by the caller are not included in the response. There
         is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param data_provider_global_metastore_id: str (optional)
           If not provided, all providers will be returned. If no providers exist with this ID, no results will
           be returned.
@@ -1647,38 +1976,50 @@ def list(self,
           from the response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ProviderInfo`
         """
 
         query = {}
         if data_provider_global_metastore_id is not None:
-            query['data_provider_global_metastore_id'] = data_provider_global_metastore_id
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+            query["data_provider_global_metastore_id"] = data_provider_global_metastore_id
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
-            if 'providers' in json:
-                for v in json['providers']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/providers",
+                query=query,
+                headers=headers,
+            )
+            if "providers" in json:
+                for v in json["providers"]:
                     yield ProviderInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def list_shares(self,
-                    name: str,
-                    *,
-                    max_results: Optional[int] = None,
-                    page_token: Optional[str] = None) -> Iterator[ProviderShare]:
+            query["page_token"] = json["next_page_token"]
+
+    def list_shares(
+        self,
+        name: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ProviderShare]:
         """List shares by Provider.
-        
+
         Gets an array of a specified provider's shares within the metastore where:
-        
+
         * the caller is a metastore admin, or * the caller is the owner.
-        
+
         :param name: str
           Name of the provider in which to list shares.
         :param max_results: int (optional)
@@ -1691,41 +2032,50 @@ def list_shares(self,
           response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ProviderShare`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.1/unity-catalog/providers/{name}/shares',
-                                query=query,
-                                headers=headers)
-            if 'shares' in json:
-                for v in json['shares']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.1/unity-catalog/providers/{name}/shares",
+                query=query,
+                headers=headers,
+            )
+            if "shares" in json:
+                for v in json["shares"]:
                     yield ProviderShare.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               recipient_profile_str: Optional[str] = None) -> ProviderInfo:
+            query["page_token"] = json["next_page_token"]
+
+    def update(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        recipient_profile_str: Optional[str] = None,
+    ) -> ProviderInfo:
         """Update a provider.
-        
+
         Updates the information for an authentication provider, if the caller is a metastore admin or is the
         owner of the provider. If the update changes the provider name, the caller must be both a metastore
         admin and the owner of the provider.
-        
+
         :param name: str
           Name of the provider.
         :param comment: str (optional)
@@ -1737,17 +2087,29 @@ def update(self,
         :param recipient_profile_str: str (optional)
           This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
           or not provided.
-        
+
         :returns: :class:`ProviderInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/providers/{name}', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if recipient_profile_str is not None:
+            body["recipient_profile_str"] = recipient_profile_str
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/providers/{name}",
+            body=body,
+            headers=headers,
+        )
         return ProviderInfo.from_dict(res)
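# Hedged usage sketch for ProvidersAPI.update as reformatted above. The provider name
# and comment are illustrative placeholders, not values taken from this patch.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
updated = w.providers.update(name="example-provider", comment="Updated via the SDK")
print(updated.name, updated.comment)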
 
 
@@ -1756,46 +2118,55 @@ class RecipientActivationAPI:
     the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data
     provider to download the credential file that includes the access token. The recipient will then use the
     credential file to establish a secure connection with the provider to receive the shared data.
-    
+
     Note that you can download the credential file only once. Recipients should treat the downloaded
-    credential as a secret and must not share it outside of their organization."""
+    credential as a secret and must not share it outside of their organization.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def get_activation_url_info(self, activation_url: str):
         """Get a share activation URL.
-        
+
         Gets an activation URL for a share.
-        
+
         :param activation_url: str
           The one-time activation URL. It also accepts an activation token.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('GET',
-                     f'/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}',
-                     headers=headers)
+        self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}",
+            headers=headers,
+        )
 
     def retrieve_token(self, activation_url: str) -> RetrieveTokenResponse:
         """Get an access token.
-        
+
         Retrieve access token with an activation url. This is a public API without any authentication.
-        
+
         :param activation_url: str
           The one-time activation URL. It also accepts an activation token.
-        
+
         :returns: :class:`RetrieveTokenResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}",
+            headers=headers,
+        )
         return RetrieveTokenResponse.from_dict(res)
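# Minimal sketch of the RecipientActivationAPI flow above: retrieve_token exchanges a
# one-time activation URL for the recipient credential. The URL below is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
token_info = w.recipient_activation.retrieve_token(activation_url="<one-time-activation-url>")
# token_info is a RetrieveTokenResponse carrying the downloaded credential details.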
 
 
@@ -1803,36 +2174,39 @@ class RecipientsAPI:
     """A recipient is an object you create using :method:recipients/create to represent an organization which you
     want to allow access shares. The way how sharing works differs depending on whether or not your recipient
     has access to a Databricks workspace that is enabled for Unity Catalog:
-    
+
     - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a
     recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier
     is the key identifier that enables the secure connection. This sharing mode is called
     **Databricks-to-Databricks sharing**.
-    
+
     - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you
     create a recipient object, Databricks generates an activation link you can send to the recipient. The
     recipient follows the activation link to download the credential file, and then uses the credential file
-    to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**."""
+    to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               authentication_type: AuthenticationType,
-               *,
-               comment: Optional[str] = None,
-               data_recipient_global_metastore_id: Optional[str] = None,
-               expiration_time: Optional[int] = None,
-               ip_access_list: Optional[IpAccessList] = None,
-               owner: Optional[str] = None,
-               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None,
-               sharing_code: Optional[str] = None) -> RecipientInfo:
+    def create(
+        self,
+        name: str,
+        authentication_type: AuthenticationType,
+        *,
+        comment: Optional[str] = None,
+        data_recipient_global_metastore_id: Optional[str] = None,
+        expiration_time: Optional[int] = None,
+        ip_access_list: Optional[IpAccessList] = None,
+        owner: Optional[str] = None,
+        properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None,
+        sharing_code: Optional[str] = None,
+    ) -> RecipientInfo:
         """Create a share recipient.
-        
+
         Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
         be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
-        
+
         :param name: str
           Name of Recipient.
         :param authentication_type: :class:`AuthenticationType`
@@ -1856,70 +2230,96 @@ def create(self,
         :param sharing_code: str (optional)
           The one-time sharing code provided by the data recipient. This field is only present when the
           __authentication_type__ is **DATABRICKS**.
-        
+
         :returns: :class:`RecipientInfo`
         """
         body = {}
-        if authentication_type is not None: body['authentication_type'] = authentication_type.value
-        if comment is not None: body['comment'] = comment
+        if authentication_type is not None:
+            body["authentication_type"] = authentication_type.value
+        if comment is not None:
+            body["comment"] = comment
         if data_recipient_global_metastore_id is not None:
-            body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id
-        if expiration_time is not None: body['expiration_time'] = expiration_time
-        if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict()
-        if name is not None: body['name'] = name
-        if owner is not None: body['owner'] = owner
-        if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict()
-        if sharing_code is not None: body['sharing_code'] = sharing_code
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/recipients', body=body, headers=headers)
+            body["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id
+        if expiration_time is not None:
+            body["expiration_time"] = expiration_time
+        if ip_access_list is not None:
+            body["ip_access_list"] = ip_access_list.as_dict()
+        if name is not None:
+            body["name"] = name
+        if owner is not None:
+            body["owner"] = owner
+        if properties_kvpairs is not None:
+            body["properties_kvpairs"] = properties_kvpairs.as_dict()
+        if sharing_code is not None:
+            body["sharing_code"] = sharing_code
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.1/unity-catalog/recipients",
+            body=body,
+            headers=headers,
+        )
         return RecipientInfo.from_dict(res)
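# Illustrative sketch of RecipientsAPI.create as reformatted above. AuthenticationType
# comes from this module; the recipient name and comment are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import AuthenticationType

w = WorkspaceClient()
recipient = w.recipients.create(
    name="example-open-sharing-recipient",
    authentication_type=AuthenticationType.TOKEN,
    comment="Created for an open-sharing recipient",
)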
 
     def delete(self, name: str):
         """Delete a share recipient.
-        
+
         Deletes the specified recipient from the metastore. The caller must be the owner of the recipient.
-        
+
         :param name: str
           Name of the recipient.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/recipients/{name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.1/unity-catalog/recipients/{name}",
+            headers=headers,
+        )
 
     def get(self, name: str) -> RecipientInfo:
         """Get a share recipient.
-        
+
         Gets a share recipient from the metastore if:
-        
+
         * the caller is the owner of the share recipient, or * the caller is a metastore admin
-        
+
         :param name: str
           Name of the recipient.
-        
+
         :returns: :class:`RecipientInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}', headers=headers)
+        res = self._api.do("GET", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers)
         return RecipientInfo.from_dict(res)
 
-    def list(self,
-             *,
-             data_recipient_global_metastore_id: Optional[str] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[RecipientInfo]:
+    def list(
+        self,
+        *,
+        data_recipient_global_metastore_id: Optional[str] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[RecipientInfo]:
         """List share recipients.
-        
+
         Gets an array of all share recipients within the current metastore where:
-        
+
         * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific
         ordering of the elements in the array.
-        
+
         :param data_recipient_global_metastore_id: str (optional)
           If not provided, all recipients will be returned. If no recipients exist with this ID, no results
           will be returned.
@@ -1933,63 +2333,80 @@ def list(self,
           from the response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`RecipientInfo`
         """
 
         query = {}
         if data_recipient_global_metastore_id is not None:
-            query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+            query["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
-            if 'recipients' in json:
-                for v in json['recipients']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/recipients",
+                query=query,
+                headers=headers,
+            )
+            if "recipients" in json:
+                for v in json["recipients"]:
                     yield RecipientInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
     def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo:
         """Rotate a token.
-        
+
         Refreshes the specified recipient's delta sharing authentication token with the provided token info.
         The caller must be the owner of the recipient.
-        
+
         :param name: str
           The name of the Recipient.
         :param existing_token_expire_in_seconds: int
           The expiration time of the bearer token in ISO 8601 format. This can only set the expiration_time of
           the existing token to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire the
           existing token immediately; a negative number will return an error.
-        
+
         :returns: :class:`RecipientInfo`
         """
         body = {}
         if existing_token_expire_in_seconds is not None:
-            body['existing_token_expire_in_seconds'] = existing_token_expire_in_seconds
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.1/unity-catalog/recipients/{name}/rotate-token',
-                           body=body,
-                           headers=headers)
+            body["existing_token_expire_in_seconds"] = existing_token_expire_in_seconds
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.1/unity-catalog/recipients/{name}/rotate-token",
+            body=body,
+            headers=headers,
+        )
         return RecipientInfo.from_dict(res)
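# Sketch of rotate_token as reformatted above: passing 0 expires the existing token
# immediately and a refreshed token is returned on the RecipientInfo. Name is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
rotated = w.recipients.rotate_token(name="example-recipient", existing_token_expire_in_seconds=0)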
 
-    def share_permissions(self,
-                          name: str,
-                          *,
-                          max_results: Optional[int] = None,
-                          page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse:
+    def share_permissions(
+        self,
+        name: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> GetRecipientSharePermissionsResponse:
         """Get recipient share permissions.
-        
+
         Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
         owner of the Recipient.
-        
+
         :param name: str
           The name of the Recipient.
         :param max_results: int (optional)
@@ -2002,36 +2419,44 @@ def share_permissions(self,
           unset from the response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: :class:`GetRecipientSharePermissionsResponse`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/recipients/{name}/share-permissions',
-                           query=query,
-                           headers=headers)
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/recipients/{name}/share-permissions",
+            query=query,
+            headers=headers,
+        )
         return GetRecipientSharePermissionsResponse.from_dict(res)
 
-    def update(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               expiration_time: Optional[int] = None,
-               ip_access_list: Optional[IpAccessList] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo:
+    def update(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        expiration_time: Optional[int] = None,
+        ip_access_list: Optional[IpAccessList] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None,
+    ) -> RecipientInfo:
         """Update a share recipient.
-        
+
         Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
         the recipient. If the recipient name will be updated, the user must be both a metastore admin and the
         owner of the recipient.
-        
+
         :param name: str
           Name of the recipient.
         :param comment: str (optional)
@@ -2048,19 +2473,33 @@ def update(self,
           Recipient properties as map of string key-value pairs. When provided in update request, the
           specified properties will override the existing properties. To add and remove properties, one would
           need to perform a read-modify-write.
-        
+
         :returns: :class:`RecipientInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if expiration_time is not None: body['expiration_time'] = expiration_time
-        if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict()
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if expiration_time is not None:
+            body["expiration_time"] = expiration_time
+        if ip_access_list is not None:
+            body["ip_access_list"] = ip_access_list.as_dict()
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if properties_kvpairs is not None:
+            body["properties_kvpairs"] = properties_kvpairs.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/recipients/{name}",
+            body=body,
+            headers=headers,
+        )
         return RecipientInfo.from_dict(res)
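# Minimal sketch of RecipientsAPI.update above. Only the fields passed are changed; note
# the docstring's caveat that properties_kvpairs overrides existing properties, so
# property edits need a read-modify-write. Names here are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.recipients.update(
    name="example-recipient",
    comment="Quarterly access review done",
    owner="data-governance-team",
)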
 
 
@@ -2073,79 +2512,99 @@ class SharesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               storage_root: Optional[str] = None) -> ShareInfo:
+    def create(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        storage_root: Optional[str] = None,
+    ) -> ShareInfo:
         """Create a share.
-        
+
         Creates a new share for data objects. Data objects can be added after creation with **update**. The
         caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.
-        
+
         :param name: str
           Name of the share.
         :param comment: str (optional)
           User-provided free-form text description.
         :param storage_root: str (optional)
           Storage root URL for the share.
-        
+
         :returns: :class:`ShareInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if storage_root is not None: body['storage_root'] = storage_root
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/shares', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if name is not None:
+            body["name"] = name
+        if storage_root is not None:
+            body["storage_root"] = storage_root
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.1/unity-catalog/shares", body=body, headers=headers)
         return ShareInfo.from_dict(res)
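# Sketch of SharesAPI.create as reformatted above; data objects are attached afterwards
# with update(). The share name is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
share = w.shares.create(name="example-share", comment="Customer-facing share")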
 
     def delete(self, name: str):
         """Delete a share.
-        
+
         Deletes a data object share from the metastore. The caller must be an owner of the share.
-        
+
         :param name: str
           The name of the share.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/shares/{name}', headers=headers)
+        self._api.do("DELETE", f"/api/2.1/unity-catalog/shares/{name}", headers=headers)
 
     def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> ShareInfo:
         """Get a share.
-        
+
         Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the
         share.
-        
+
         :param name: str
           The name of the share.
         :param include_shared_data: bool (optional)
           Query for data to include in the share.
-        
+
         :returns: :class:`ShareInfo`
         """
 
         query = {}
-        if include_shared_data is not None: query['include_shared_data'] = include_shared_data
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}', query=query, headers=headers)
+        if include_shared_data is not None:
+            query["include_shared_data"] = include_shared_data
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/shares/{name}",
+            query=query,
+            headers=headers,
+        )
         return ShareInfo.from_dict(res)
 
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ShareInfo]:
+    def list(
+        self,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ShareInfo]:
         """List shares.
-        
+
         Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
         owner of the share. There is no guarantee of a specific ordering of the elements in the array.
-        
+
         :param max_results: int (optional)
           Maximum number of shares to return. - when set to 0, the page length is set to a server configured
           value (recommended); - when set to a value greater than 0, the page length is the minimum of this
@@ -2156,35 +2615,47 @@ def list(self,
           response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: Iterator over :class:`ShareInfo`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        if "max_results" not in query: query['max_results'] = 0
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        if "max_results" not in query:
+            query["max_results"] = 0
         while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers)
-            if 'shares' in json:
-                for v in json['shares']:
+            json = self._api.do(
+                "GET",
+                "/api/2.1/unity-catalog/shares",
+                query=query,
+                headers=headers,
+            )
+            if "shares" in json:
+                for v in json["shares"]:
                     yield ShareInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def share_permissions(self,
-                          name: str,
-                          *,
-                          max_results: Optional[int] = None,
-                          page_token: Optional[str] = None) -> catalog.PermissionsList:
+            query["page_token"] = json["next_page_token"]
+
+    def share_permissions(
+        self,
+        name: str,
+        *,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> catalog.PermissionsList:
         """Get permissions.
-        
+
         Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
         owner of the share.
-        
+
         :param name: str
           The name of the share.
         :param max_results: int (optional)
@@ -2197,47 +2668,55 @@ def share_permissions(self,
           unset from the response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
+
         :returns: :class:`PermissionsList`
         """
 
         query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET',
-                           f'/api/2.1/unity-catalog/shares/{name}/permissions',
-                           query=query,
-                           headers=headers)
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.1/unity-catalog/shares/{name}/permissions",
+            query=query,
+            headers=headers,
+        )
         return catalog.PermissionsList.from_dict(res)
 
-    def update(self,
-               name: str,
-               *,
-               comment: Optional[str] = None,
-               new_name: Optional[str] = None,
-               owner: Optional[str] = None,
-               storage_root: Optional[str] = None,
-               updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo:
+    def update(
+        self,
+        name: str,
+        *,
+        comment: Optional[str] = None,
+        new_name: Optional[str] = None,
+        owner: Optional[str] = None,
+        storage_root: Optional[str] = None,
+        updates: Optional[List[SharedDataObjectUpdate]] = None,
+    ) -> ShareInfo:
         """Update a share.
-        
+
         Updates the share with the changes and data objects in the request. The caller must be the owner of
         the share or a metastore admin.
-        
+
         When the caller is a metastore admin, only the __owner__ field can be updated.
-        
+
         In the case that the share name is changed, **updateShare** requires that the caller is both the share
         owner and a metastore admin.
-        
+
         If there are notebook files in the share, the __storage_root__ field cannot be updated.
-        
+
         For each table that is added through this method, the share owner must also have **SELECT** privilege
         on the table. This privilege must be maintained indefinitely for recipients to be able to access the
         table. Typically, you should use a group as the share owner.
-        
+
         Table removals through **update** do not require additional privileges.
-        
+
         :param name: str
           The name of the share.
         :param comment: str (optional)
@@ -2250,34 +2729,49 @@ def update(self,
           Storage root URL for the share.
         :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
           Array of shared data object updates.
-        
+
         :returns: :class:`ShareInfo`
         """
         body = {}
-        if comment is not None: body['comment'] = comment
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        if storage_root is not None: body['storage_root'] = storage_root
-        if updates is not None: body['updates'] = [v.as_dict() for v in updates]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}', body=body, headers=headers)
+        if comment is not None:
+            body["comment"] = comment
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        if storage_root is not None:
+            body["storage_root"] = storage_root
+        if updates is not None:
+            body["updates"] = [v.as_dict() for v in updates]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/shares/{name}",
+            body=body,
+            headers=headers,
+        )
         return ShareInfo.from_dict(res)
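# Hedged sketch of SharesAPI.update above, changing only metadata. Adding or removing
# data objects goes through updates=[SharedDataObjectUpdate(...)]; the exact
# SharedDataObject fields vary by SDK version, so they are omitted here.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.shares.update(
    name="example-share",
    new_name="example-share-v2",
    comment="Renamed after review",
)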
 
-    def update_permissions(self,
-                           name: str,
-                           *,
-                           changes: Optional[List[catalog.PermissionsChange]] = None,
-                           max_results: Optional[int] = None,
-                           page_token: Optional[str] = None):
+    def update_permissions(
+        self,
+        name: str,
+        *,
+        changes: Optional[List[catalog.PermissionsChange]] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ):
         """Update permissions.
-        
+
         Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
         owner of the share.
-        
+
         For new recipient grants, the user must also be the owner of the recipients. Recipient revocations do
         not require additional privileges.
-        
+
         :param name: str
           The name of the share.
         :param changes: List[:class:`PermissionsChange`] (optional)
@@ -2292,18 +2786,26 @@ def update_permissions(self,
           unset from the response.
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
-        
-        
+
+
         """
         body = {}
         query = {}
-        if changes is not None: body['changes'] = [v.as_dict() for v in changes]
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH',
-                     f'/api/2.1/unity-catalog/shares/{name}/permissions',
-                     query=query,
-                     body=body,
-                     headers=headers)
+        if changes is not None:
+            body["changes"] = [v.as_dict() for v in changes]
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.1/unity-catalog/shares/{name}/permissions",
+            query=query,
+            body=body,
+            headers=headers,
+        )
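# Sketch of SharesAPI.update_permissions above, granting a recipient SELECT on a share.
# catalog.PermissionsChange matches the signature above; catalog.Privilege.SELECT is
# assumed to be available in databricks.sdk.service.catalog. Names are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()
w.shares.update_permissions(
    name="example-share",
    changes=[catalog.PermissionsChange(principal="example-recipient", add=[catalog.Privilege.SELECT])],
)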
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index cfa94aaa7..5fd9fb6da 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -31,25 +31,33 @@ class AccessControl:
     def as_dict(self) -> dict:
         """Serializes the AccessControl into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AccessControl into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControl:
         """Deserializes the AccessControl from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', PermissionLevel),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", PermissionLevel),
+            user_name=d.get("user_name", None),
+        )
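# Round-trip sketch for the reformatted AccessControl above: as_dict() serializes the
# enum via .value, while from_dict() rebuilds it with _enum. Values are illustrative.
from databricks.sdk.service import sql

ac = sql.AccessControl(group_name="data-readers", permission_level=sql.PermissionLevel.CAN_RUN)
payload = ac.as_dict()  # {'group_name': 'data-readers', 'permission_level': 'CAN_RUN'}
restored = sql.AccessControl.from_dict(payload)
assert restored.permission_level is sql.PermissionLevel.CAN_RUN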
 
 
 @dataclass
@@ -109,61 +117,93 @@ class Alert:
     def as_dict(self) -> dict:
         """Serializes the Alert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition: body['condition'] = self.condition.as_dict()
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state.value
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition.as_dict()
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Alert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition: body['condition'] = self.condition
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Alert:
         """Deserializes the Alert from a dictionary."""
-        return cls(condition=_from_dict(d, 'condition', AlertCondition),
-                   create_time=d.get('create_time', None),
-                   custom_body=d.get('custom_body', None),
-                   custom_subject=d.get('custom_subject', None),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   notify_on_ok=d.get('notify_on_ok', None),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parent_path=d.get('parent_path', None),
-                   query_id=d.get('query_id', None),
-                   seconds_to_retrigger=d.get('seconds_to_retrigger', None),
-                   state=_enum(d, 'state', AlertState),
-                   trigger_time=d.get('trigger_time', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            condition=_from_dict(d, "condition", AlertCondition),
+            create_time=d.get("create_time", None),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            notify_on_ok=d.get("notify_on_ok", None),
+            owner_user_name=d.get("owner_user_name", None),
+            parent_path=d.get("parent_path", None),
+            query_id=d.get("query_id", None),
+            seconds_to_retrigger=d.get("seconds_to_retrigger", None),
+            state=_enum(d, "state", AlertState),
+            trigger_time=d.get("trigger_time", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -183,28 +223,38 @@ class AlertCondition:
     def as_dict(self) -> dict:
         """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value
-        if self.op is not None: body['op'] = self.op.value
-        if self.operand: body['operand'] = self.operand.as_dict()
-        if self.threshold: body['threshold'] = self.threshold.as_dict()
+        if self.empty_result_state is not None:
+            body["empty_result_state"] = self.empty_result_state.value
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.operand:
+            body["operand"] = self.operand.as_dict()
+        if self.threshold:
+            body["threshold"] = self.threshold.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertCondition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
-        if self.op is not None: body['op'] = self.op
-        if self.operand: body['operand'] = self.operand
-        if self.threshold: body['threshold'] = self.threshold
+        if self.empty_result_state is not None:
+            body["empty_result_state"] = self.empty_result_state
+        if self.op is not None:
+            body["op"] = self.op
+        if self.operand:
+            body["operand"] = self.operand
+        if self.threshold:
+            body["threshold"] = self.threshold
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
         """Deserializes the AlertCondition from a dictionary."""
-        return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState),
-                   op=_enum(d, 'op', AlertOperator),
-                   operand=_from_dict(d, 'operand', AlertConditionOperand),
-                   threshold=_from_dict(d, 'threshold', AlertConditionThreshold))
+        return cls(
+            empty_result_state=_enum(d, "empty_result_state", AlertState),
+            op=_enum(d, "op", AlertOperator),
+            operand=_from_dict(d, "operand", AlertConditionOperand),
+            threshold=_from_dict(d, "threshold", AlertConditionThreshold),
+        )
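# Sketch contrasting as_dict() and as_shallow_dict() on the reformatted AlertCondition:
# the former recursively serializes the nested operand/threshold objects, the latter
# keeps them as dataclass instances. Column name and threshold are illustrative.
from databricks.sdk.service import sql

cond = sql.AlertCondition(
    op=sql.AlertOperator.GREATER_THAN,
    operand=sql.AlertConditionOperand(column=sql.AlertOperandColumn(name="error_count")),
    threshold=sql.AlertConditionThreshold(value=sql.AlertOperandValue(double_value=100.0)),
)
deep = cond.as_dict()  # nested dicts, op serialized as "GREATER_THAN"
shallow = cond.as_shallow_dict()  # operand/threshold remain dataclass instances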
 
 
 @dataclass
@@ -214,19 +264,21 @@ class AlertConditionOperand:
     def as_dict(self) -> dict:
         """Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.column: body['column'] = self.column.as_dict()
+        if self.column:
+            body["column"] = self.column.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.column: body['column'] = self.column
+        if self.column:
+            body["column"] = self.column
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
         """Deserializes the AlertConditionOperand from a dictionary."""
-        return cls(column=_from_dict(d, 'column', AlertOperandColumn))
+        return cls(column=_from_dict(d, "column", AlertOperandColumn))
 
 
 @dataclass
@@ -236,19 +288,21 @@ class AlertConditionThreshold:
     def as_dict(self) -> dict:
         """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value: body['value'] = self.value.as_dict()
+        if self.value:
+            body["value"] = self.value.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value: body['value'] = self.value
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
         """Deserializes the AlertConditionThreshold from a dictionary."""
-        return cls(value=_from_dict(d, 'value', AlertOperandValue))
+        return cls(value=_from_dict(d, "value", AlertOperandValue))
 
 
 @dataclass
@@ -258,19 +312,21 @@ class AlertOperandColumn:
     def as_dict(self) -> dict:
         """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
         """Deserializes the AlertOperandColumn from a dictionary."""
-        return cls(name=d.get('name', None))
+        return cls(name=d.get("name", None))
 
 
 @dataclass
@@ -284,36 +340,44 @@ class AlertOperandValue:
     def as_dict(self) -> dict:
         """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bool_value is not None: body['bool_value'] = self.bool_value
-        if self.double_value is not None: body['double_value'] = self.double_value
-        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.bool_value is not None:
+            body["bool_value"] = self.bool_value
+        if self.double_value is not None:
+            body["double_value"] = self.double_value
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bool_value is not None: body['bool_value'] = self.bool_value
-        if self.double_value is not None: body['double_value'] = self.double_value
-        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.bool_value is not None:
+            body["bool_value"] = self.bool_value
+        if self.double_value is not None:
+            body["double_value"] = self.double_value
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
         """Deserializes the AlertOperandValue from a dictionary."""
-        return cls(bool_value=d.get('bool_value', None),
-                   double_value=d.get('double_value', None),
-                   string_value=d.get('string_value', None))
+        return cls(
+            bool_value=d.get("bool_value", None),
+            double_value=d.get("double_value", None),
+            string_value=d.get("string_value", None),
+        )
 
 
 class AlertOperator(Enum):
 
-    EQUAL = 'EQUAL'
-    GREATER_THAN = 'GREATER_THAN'
-    GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
-    IS_NULL = 'IS_NULL'
-    LESS_THAN = 'LESS_THAN'
-    LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
-    NOT_EQUAL = 'NOT_EQUAL'
+    EQUAL = "EQUAL"
+    GREATER_THAN = "GREATER_THAN"
+    GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL"
+    IS_NULL = "IS_NULL"
+    LESS_THAN = "LESS_THAN"
+    LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL"
+    NOT_EQUAL = "NOT_EQUAL"
 
 
 @dataclass
@@ -351,45 +415,61 @@ class AlertOptions:
     def as_dict(self) -> dict:
         """Serializes the AlertOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.column is not None: body['column'] = self.column
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value
-        if self.muted is not None: body['muted'] = self.muted
-        if self.op is not None: body['op'] = self.op
-        if self.value: body['value'] = self.value
+        if self.column is not None:
+            body["column"] = self.column
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.empty_result_state is not None:
+            body["empty_result_state"] = self.empty_result_state.value
+        if self.muted is not None:
+            body["muted"] = self.muted
+        if self.op is not None:
+            body["op"] = self.op
+        if self.value:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.column is not None: body['column'] = self.column
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
-        if self.muted is not None: body['muted'] = self.muted
-        if self.op is not None: body['op'] = self.op
-        if self.value: body['value'] = self.value
+        if self.column is not None:
+            body["column"] = self.column
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.empty_result_state is not None:
+            body["empty_result_state"] = self.empty_result_state
+        if self.muted is not None:
+            body["muted"] = self.muted
+        if self.op is not None:
+            body["op"] = self.op
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOptions:
         """Deserializes the AlertOptions from a dictionary."""
-        return cls(column=d.get('column', None),
-                   custom_body=d.get('custom_body', None),
-                   custom_subject=d.get('custom_subject', None),
-                   empty_result_state=_enum(d, 'empty_result_state', AlertOptionsEmptyResultState),
-                   muted=d.get('muted', None),
-                   op=d.get('op', None),
-                   value=d.get('value', None))
+        return cls(
+            column=d.get("column", None),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            empty_result_state=_enum(d, "empty_result_state", AlertOptionsEmptyResultState),
+            muted=d.get("muted", None),
+            op=d.get("op", None),
+            value=d.get("value", None),
+        )
 
 
 class AlertOptionsEmptyResultState(Enum):
     """State that alert evaluates to when query result is empty."""
 
-    OK = 'ok'
-    TRIGGERED = 'triggered'
-    UNKNOWN = 'unknown'
+    OK = "ok"
+    TRIGGERED = "triggered"
+    UNKNOWN = "unknown"
 
 
 @dataclass
@@ -442,62 +522,90 @@ class AlertQuery:
     def as_dict(self) -> dict:
         """Serializes the AlertQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.query is not None: body['query'] = self.query
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.query is not None:
+            body["query"] = self.query
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AlertQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query is not None: body['query'] = self.query
-        if self.tags: body['tags'] = self.tags
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query is not None:
+            body["query"] = self.query
+        if self.tags:
+            body["tags"] = self.tags
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertQuery:
         """Deserializes the AlertQuery from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   data_source_id=d.get('data_source_id', None),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_safe=d.get('is_safe', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', QueryOptions),
-                   query=d.get('query', None),
-                   tags=d.get('tags', None),
-                   updated_at=d.get('updated_at', None),
-                   user_id=d.get('user_id', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            is_archived=d.get("is_archived", None),
+            is_draft=d.get("is_draft", None),
+            is_safe=d.get("is_safe", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", QueryOptions),
+            query=d.get("query", None),
+            tags=d.get("tags", None),
+            updated_at=d.get("updated_at", None),
+            user_id=d.get("user_id", None),
+        )
 
 
 class AlertState(Enum):
 
-    OK = 'OK'
-    TRIGGERED = 'TRIGGERED'
-    UNKNOWN = 'UNKNOWN'
+    OK = "OK"
+    TRIGGERED = "TRIGGERED"
+    UNKNOWN = "UNKNOWN"
 
 
 @dataclass
@@ -521,28 +629,38 @@ class BaseChunkInfo:
     def as_dict(self) -> dict:
         """Serializes the BaseChunkInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseChunkInfo:
         """Deserializes the BaseChunkInfo from a dictionary."""
-        return cls(byte_count=d.get('byte_count', None),
-                   chunk_index=d.get('chunk_index', None),
-                   row_count=d.get('row_count', None),
-                   row_offset=d.get('row_offset', None))
+        return cls(
+            byte_count=d.get("byte_count", None),
+            chunk_index=d.get("chunk_index", None),
+            row_count=d.get("row_count", None),
+            row_offset=d.get("row_offset", None),
+        )
 
 
 @dataclass
@@ -576,21 +694,28 @@ class Channel:
     def as_dict(self) -> dict:
         """Serializes the Channel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
-        if self.name is not None: body['name'] = self.name.value
+        if self.dbsql_version is not None:
+            body["dbsql_version"] = self.dbsql_version
+        if self.name is not None:
+            body["name"] = self.name.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Channel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
-        if self.name is not None: body['name'] = self.name
+        if self.dbsql_version is not None:
+            body["dbsql_version"] = self.dbsql_version
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Channel:
         """Deserializes the Channel from a dictionary."""
-        return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName))
+        return cls(
+            dbsql_version=d.get("dbsql_version", None),
+            name=_enum(d, "name", ChannelName),
+        )
 
 
 @dataclass
@@ -606,29 +731,36 @@ class ChannelInfo:
     def as_dict(self) -> dict:
         """Serializes the ChannelInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
-        if self.name is not None: body['name'] = self.name.value
+        if self.dbsql_version is not None:
+            body["dbsql_version"] = self.dbsql_version
+        if self.name is not None:
+            body["name"] = self.name.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
-        if self.name is not None: body['name'] = self.name
+        if self.dbsql_version is not None:
+            body["dbsql_version"] = self.dbsql_version
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChannelInfo:
         """Deserializes the ChannelInfo from a dictionary."""
-        return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName))
+        return cls(
+            dbsql_version=d.get("dbsql_version", None),
+            name=_enum(d, "name", ChannelName),
+        )
 
 
 class ChannelName(Enum):
 
-    CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
-    CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
-    CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW'
-    CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
+    CHANNEL_NAME_CURRENT = "CHANNEL_NAME_CURRENT"
+    CHANNEL_NAME_CUSTOM = "CHANNEL_NAME_CUSTOM"
+    CHANNEL_NAME_PREVIEW = "CHANNEL_NAME_PREVIEW"
+    CHANNEL_NAME_PREVIOUS = "CHANNEL_NAME_PREVIOUS"
 
 
 @dataclass
@@ -657,51 +789,67 @@ def as_dict(self) -> dict:
         """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allow_custom_js_visualizations is not None:
-            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
-        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
-        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
-        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
-        if self.date_format is not None: body['date_format'] = self.date_format
-        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
-        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+            body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None:
+            body["allow_downloads"] = self.allow_downloads
+        if self.allow_external_shares is not None:
+            body["allow_external_shares"] = self.allow_external_shares
+        if self.allow_subscriptions is not None:
+            body["allow_subscriptions"] = self.allow_subscriptions
+        if self.date_format is not None:
+            body["date_format"] = self.date_format
+        if self.date_time_format is not None:
+            body["date_time_format"] = self.date_time_format
+        if self.disable_publish is not None:
+            body["disable_publish"] = self.disable_publish
         if self.enable_legacy_autodetect_types is not None:
-            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+            body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types
         if self.feature_show_permissions_control is not None:
-            body['feature_show_permissions_control'] = self.feature_show_permissions_control
-        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+            body["feature_show_permissions_control"] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None:
+            body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClientConfig into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.allow_custom_js_visualizations is not None:
-            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
-        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
-        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
-        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
-        if self.date_format is not None: body['date_format'] = self.date_format
-        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
-        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+            body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None:
+            body["allow_downloads"] = self.allow_downloads
+        if self.allow_external_shares is not None:
+            body["allow_external_shares"] = self.allow_external_shares
+        if self.allow_subscriptions is not None:
+            body["allow_subscriptions"] = self.allow_subscriptions
+        if self.date_format is not None:
+            body["date_format"] = self.date_format
+        if self.date_time_format is not None:
+            body["date_time_format"] = self.date_time_format
+        if self.disable_publish is not None:
+            body["disable_publish"] = self.disable_publish
         if self.enable_legacy_autodetect_types is not None:
-            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+            body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types
         if self.feature_show_permissions_control is not None:
-            body['feature_show_permissions_control'] = self.feature_show_permissions_control
-        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+            body["feature_show_permissions_control"] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None:
+            body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClientConfig:
         """Deserializes the ClientConfig from a dictionary."""
-        return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None),
-                   allow_downloads=d.get('allow_downloads', None),
-                   allow_external_shares=d.get('allow_external_shares', None),
-                   allow_subscriptions=d.get('allow_subscriptions', None),
-                   date_format=d.get('date_format', None),
-                   date_time_format=d.get('date_time_format', None),
-                   disable_publish=d.get('disable_publish', None),
-                   enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None),
-                   feature_show_permissions_control=d.get('feature_show_permissions_control', None),
-                   hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None))
+        return cls(
+            allow_custom_js_visualizations=d.get("allow_custom_js_visualizations", None),
+            allow_downloads=d.get("allow_downloads", None),
+            allow_external_shares=d.get("allow_external_shares", None),
+            allow_subscriptions=d.get("allow_subscriptions", None),
+            date_format=d.get("date_format", None),
+            date_time_format=d.get("date_time_format", None),
+            disable_publish=d.get("disable_publish", None),
+            enable_legacy_autodetect_types=d.get("enable_legacy_autodetect_types", None),
+            feature_show_permissions_control=d.get("feature_show_permissions_control", None),
+            hide_plotly_mode_bar=d.get("hide_plotly_mode_bar", None),
+        )
 
 
 @dataclass
@@ -732,62 +880,78 @@ class ColumnInfo:
     def as_dict(self) -> dict:
         """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_name is not None:
+            body["type_name"] = self.type_name.value
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_name is not None: body['type_name'] = self.type_name
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.name is not None:
+            body["name"] = self.name
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_name is not None:
+            body["type_name"] = self.type_name
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
-        return cls(name=d.get('name', None),
-                   position=d.get('position', None),
-                   type_interval_type=d.get('type_interval_type', None),
-                   type_name=_enum(d, 'type_name', ColumnInfoTypeName),
-                   type_precision=d.get('type_precision', None),
-                   type_scale=d.get('type_scale', None),
-                   type_text=d.get('type_text', None))
+        return cls(
+            name=d.get("name", None),
+            position=d.get("position", None),
+            type_interval_type=d.get("type_interval_type", None),
+            type_name=_enum(d, "type_name", ColumnInfoTypeName),
+            type_precision=d.get("type_precision", None),
+            type_scale=d.get("type_scale", None),
+            type_text=d.get("type_text", None),
+        )
 
 
 class ColumnInfoTypeName(Enum):
     """The name of the base data type. This doesn't include details for complex types such as STRUCT,
     MAP or ARRAY."""
 
-    ARRAY = 'ARRAY'
-    BINARY = 'BINARY'
-    BOOLEAN = 'BOOLEAN'
-    BYTE = 'BYTE'
-    CHAR = 'CHAR'
-    DATE = 'DATE'
-    DECIMAL = 'DECIMAL'
-    DOUBLE = 'DOUBLE'
-    FLOAT = 'FLOAT'
-    INT = 'INT'
-    INTERVAL = 'INTERVAL'
-    LONG = 'LONG'
-    MAP = 'MAP'
-    NULL = 'NULL'
-    SHORT = 'SHORT'
-    STRING = 'STRING'
-    STRUCT = 'STRUCT'
-    TIMESTAMP = 'TIMESTAMP'
-    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
+    ARRAY = "ARRAY"
+    BINARY = "BINARY"
+    BOOLEAN = "BOOLEAN"
+    BYTE = "BYTE"
+    CHAR = "CHAR"
+    DATE = "DATE"
+    DECIMAL = "DECIMAL"
+    DOUBLE = "DOUBLE"
+    FLOAT = "FLOAT"
+    INT = "INT"
+    INTERVAL = "INTERVAL"
+    LONG = "LONG"
+    MAP = "MAP"
+    NULL = "NULL"
+    SHORT = "SHORT"
+    STRING = "STRING"
+    STRUCT = "STRUCT"
+    TIMESTAMP = "TIMESTAMP"
+    USER_DEFINED_TYPE = "USER_DEFINED_TYPE"
 
 
 @dataclass
@@ -811,31 +975,43 @@ class CreateAlert:
     def as_dict(self) -> dict:
         """Serializes the CreateAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlert:
         """Deserializes the CreateAlert from a dictionary."""
-        return cls(name=d.get('name', None),
-                   options=_from_dict(d, 'options', AlertOptions),
-                   parent=d.get('parent', None),
-                   query_id=d.get('query_id', None),
-                   rearm=d.get('rearm', None))
+        return cls(
+            name=d.get("name", None),
+            options=_from_dict(d, "options", AlertOptions),
+            parent=d.get("parent", None),
+            query_id=d.get("query_id", None),
+            rearm=d.get("rearm", None),
+        )
 
 
 @dataclass
@@ -845,19 +1021,21 @@ class CreateAlertRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert: body['alert'] = self.alert.as_dict()
+        if self.alert:
+            body["alert"] = self.alert.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert: body['alert'] = self.alert
+        if self.alert:
+            body["alert"] = self.alert
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
         """Deserializes the CreateAlertRequest from a dictionary."""
-        return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert))
+        return cls(alert=_from_dict(d, "alert", CreateAlertRequestAlert))
 
 
 @dataclass
@@ -895,40 +1073,58 @@ class CreateAlertRequestAlert:
     def as_dict(self) -> dict:
         """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition: body['condition'] = self.condition.as_dict()
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.condition:
+            body["condition"] = self.condition.as_dict()
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition: body['condition'] = self.condition
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.condition:
+            body["condition"] = self.condition
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
         """Deserializes the CreateAlertRequestAlert from a dictionary."""
-        return cls(condition=_from_dict(d, 'condition', AlertCondition),
-                   custom_body=d.get('custom_body', None),
-                   custom_subject=d.get('custom_subject', None),
-                   display_name=d.get('display_name', None),
-                   notify_on_ok=d.get('notify_on_ok', None),
-                   parent_path=d.get('parent_path', None),
-                   query_id=d.get('query_id', None),
-                   seconds_to_retrigger=d.get('seconds_to_retrigger', None))
+        return cls(
+            condition=_from_dict(d, "condition", AlertCondition),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            display_name=d.get("display_name", None),
+            notify_on_ok=d.get("notify_on_ok", None),
+            parent_path=d.get("parent_path", None),
+            query_id=d.get("query_id", None),
+            seconds_to_retrigger=d.get("seconds_to_retrigger", None),
+        )
 
 
 @dataclass
@@ -938,19 +1134,21 @@ class CreateQueryRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query: body['query'] = self.query.as_dict()
+        if self.query:
+            body["query"] = self.query.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query: body['query'] = self.query
+        if self.query:
+            body["query"] = self.query
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
         """Deserializes the CreateQueryRequest from a dictionary."""
-        return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery))
+        return cls(query=_from_dict(d, "query", CreateQueryRequestQuery))
 
 
 @dataclass
@@ -990,49 +1188,73 @@ class CreateQueryRequestQuery:
     def as_dict(self) -> dict:
         """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
         """Deserializes the CreateQueryRequestQuery from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
-                   catalog=d.get('catalog', None),
-                   description=d.get('description', None),
-                   display_name=d.get('display_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   parent_path=d.get('parent_path', None),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
-                   tags=d.get('tags', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            parent_path=d.get("parent_path", None),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -1042,19 +1264,21 @@ class CreateVisualizationRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        if self.visualization:
+            body["visualization"] = self.visualization.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.visualization: body['visualization'] = self.visualization
+        if self.visualization:
+            body["visualization"] = self.visualization
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
         """Deserializes the CreateVisualizationRequest from a dictionary."""
-        return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization))
+        return cls(visualization=_from_dict(d, "visualization", CreateVisualizationRequestVisualization))
 
 
 @dataclass
@@ -1079,31 +1303,43 @@ class CreateVisualizationRequestVisualization:
     def as_dict(self) -> dict:
         """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
         """Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   query_id=d.get('query_id', None),
-                   serialized_options=d.get('serialized_options', None),
-                   serialized_query_plan=d.get('serialized_query_plan', None),
-                   type=d.get('type', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            query_id=d.get("query_id", None),
+            serialized_options=d.get("serialized_options", None),
+            serialized_query_plan=d.get("serialized_query_plan", None),
+            type=d.get("type", None),
+        )
 
 
 @dataclass
@@ -1180,67 +1416,92 @@ class CreateWarehouseRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
         if self.spot_instance_policy is not None:
-            body['spot_instance_policy'] = self.spot_instance_policy.value
-        if self.tags: body['tags'] = self.tags.as_dict()
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+            body["spot_instance_policy"] = self.spot_instance_policy.value
+        if self.tags:
+            body["tags"] = self.tags.as_dict()
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest:
         """Deserializes the CreateWarehouseRequest from a dictionary."""
-        return cls(auto_stop_mins=d.get('auto_stop_mins', None),
-                   channel=_from_dict(d, 'channel', Channel),
-                   cluster_size=d.get('cluster_size', None),
-                   creator_name=d.get('creator_name', None),
-                   enable_photon=d.get('enable_photon', None),
-                   enable_serverless_compute=d.get('enable_serverless_compute', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   max_num_clusters=d.get('max_num_clusters', None),
-                   min_num_clusters=d.get('min_num_clusters', None),
-                   name=d.get('name', None),
-                   spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy),
-                   tags=_from_dict(d, 'tags', EndpointTags),
-                   warehouse_type=_enum(d, 'warehouse_type', CreateWarehouseRequestWarehouseType))
+        return cls(
+            auto_stop_mins=d.get("auto_stop_mins", None),
+            channel=_from_dict(d, "channel", Channel),
+            cluster_size=d.get("cluster_size", None),
+            creator_name=d.get("creator_name", None),
+            enable_photon=d.get("enable_photon", None),
+            enable_serverless_compute=d.get("enable_serverless_compute", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            max_num_clusters=d.get("max_num_clusters", None),
+            min_num_clusters=d.get("min_num_clusters", None),
+            name=d.get("name", None),
+            spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy),
+            tags=_from_dict(d, "tags", EndpointTags),
+            warehouse_type=_enum(d, "warehouse_type", CreateWarehouseRequestWarehouseType),
+        )
 
 
 class CreateWarehouseRequestWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
     and also set the field `enable_serverless_compute` to `true`."""
 
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
 
 
 @dataclass
@@ -1251,19 +1512,21 @@ class CreateWarehouseResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateWarehouseResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
+        if self.id is not None:
+            body["id"] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseResponse:
         """Deserializes the CreateWarehouseResponse from a dictionary."""
-        return cls(id=d.get('id', None))
+        return cls(id=d.get("id", None))
 
 
 @dataclass
@@ -1289,34 +1552,48 @@ class CreateWidget:
     def as_dict(self) -> dict:
         """Serializes the CreateWidget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.options: body['options'] = self.options.as_dict()
-        if self.text is not None: body['text'] = self.text
-        if self.visualization_id is not None: body['visualization_id'] = self.visualization_id
-        if self.width is not None: body['width'] = self.width
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.text is not None:
+            body["text"] = self.text
+        if self.visualization_id is not None:
+            body["visualization_id"] = self.visualization_id
+        if self.width is not None:
+            body["width"] = self.width
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateWidget into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.options: body['options'] = self.options
-        if self.text is not None: body['text'] = self.text
-        if self.visualization_id is not None: body['visualization_id'] = self.visualization_id
-        if self.width is not None: body['width'] = self.width
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.options:
+            body["options"] = self.options
+        if self.text is not None:
+            body["text"] = self.text
+        if self.visualization_id is not None:
+            body["visualization_id"] = self.visualization_id
+        if self.width is not None:
+            body["width"] = self.width
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWidget:
         """Deserializes the CreateWidget from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   id=d.get('id', None),
-                   options=_from_dict(d, 'options', WidgetOptions),
-                   text=d.get('text', None),
-                   visualization_id=d.get('visualization_id', None),
-                   width=d.get('width', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            id=d.get("id", None),
+            options=_from_dict(d, "options", WidgetOptions),
+            text=d.get("text", None),
+            visualization_id=d.get("visualization_id", None),
+            width=d.get("width", None),
+        )
 
 
 @dataclass
@@ -1379,69 +1656,103 @@ class Dashboard:
     def as_dict(self) -> dict:
         """Serializes the Dashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
         if self.dashboard_filters_enabled is not None:
-            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.slug is not None: body['slug'] = self.slug
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
+            body["dashboard_filters_enabled"] = self.dashboard_filters_enabled
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier.value
+        if self.slug is not None:
+            body["slug"] = self.slug
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user.as_dict()
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.widgets:
+            body["widgets"] = [v.as_dict() for v in self.widgets]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
         if self.dashboard_filters_enabled is not None:
-            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
-        if self.slug is not None: body['slug'] = self.slug
-        if self.tags: body['tags'] = self.tags
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.widgets: body['widgets'] = self.widgets
+            body["dashboard_filters_enabled"] = self.dashboard_filters_enabled
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier
+        if self.slug is not None:
+            body["slug"] = self.slug
+        if self.tags:
+            body["tags"] = self.tags
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.widgets:
+            body["widgets"] = self.widgets
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
-        return cls(can_edit=d.get('can_edit', None),
-                   created_at=d.get('created_at', None),
-                   dashboard_filters_enabled=d.get('dashboard_filters_enabled', None),
-                   id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_favorite=d.get('is_favorite', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', DashboardOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   slug=d.get('slug', None),
-                   tags=d.get('tags', None),
-                   updated_at=d.get('updated_at', None),
-                   user=_from_dict(d, 'user', User),
-                   user_id=d.get('user_id', None),
-                   widgets=_repeated_dict(d, 'widgets', Widget))
+        return cls(
+            can_edit=d.get("can_edit", None),
+            created_at=d.get("created_at", None),
+            dashboard_filters_enabled=d.get("dashboard_filters_enabled", None),
+            id=d.get("id", None),
+            is_archived=d.get("is_archived", None),
+            is_draft=d.get("is_draft", None),
+            is_favorite=d.get("is_favorite", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", DashboardOptions),
+            parent=d.get("parent", None),
+            permission_tier=_enum(d, "permission_tier", PermissionLevel),
+            slug=d.get("slug", None),
+            tags=d.get("tags", None),
+            updated_at=d.get("updated_at", None),
+            user=_from_dict(d, "user", User),
+            user_id=d.get("user_id", None),
+            widgets=_repeated_dict(d, "widgets", Widget),
+        )
 
 
 @dataclass
@@ -1460,28 +1771,38 @@ class DashboardEditContent:
     def as_dict(self) -> dict:
         """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.name is not None: body['name'] = self.name
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.name is not None: body['name'] = self.name
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardEditContent:
         """Deserializes the DashboardEditContent from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   name=d.get('name', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None))
+        return cls(
+            dashboard_id=d.get("dashboard_id", None),
+            name=d.get("name", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+        )
 
 
 @dataclass
@@ -1493,19 +1814,21 @@ class DashboardOptions:
     def as_dict(self) -> dict:
         """Serializes the DashboardOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        if self.moved_to_trash_at is not None:
+            body["moved_to_trash_at"] = self.moved_to_trash_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        if self.moved_to_trash_at is not None:
+            body["moved_to_trash_at"] = self.moved_to_trash_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardOptions:
         """Deserializes the DashboardOptions from a dictionary."""
-        return cls(moved_to_trash_at=d.get('moved_to_trash_at', None))
+        return cls(moved_to_trash_at=d.get("moved_to_trash_at", None))
 
 
 @dataclass
@@ -1532,35 +1855,47 @@ def as_dict(self) -> dict:
         """Serializes the DashboardPostContent into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.dashboard_filters_enabled is not None:
-            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.name is not None: body['name'] = self.name
-        if self.parent is not None: body['parent'] = self.parent
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+            body["dashboard_filters_enabled"] = self.dashboard_filters_enabled
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.dashboard_filters_enabled is not None:
-            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.name is not None: body['name'] = self.name
-        if self.parent is not None: body['parent'] = self.parent
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
+            body["dashboard_filters_enabled"] = self.dashboard_filters_enabled
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardPostContent:
         """Deserializes the DashboardPostContent from a dictionary."""
-        return cls(dashboard_filters_enabled=d.get('dashboard_filters_enabled', None),
-                   is_favorite=d.get('is_favorite', None),
-                   name=d.get('name', None),
-                   parent=d.get('parent', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None))
+        return cls(
+            dashboard_filters_enabled=d.get("dashboard_filters_enabled", None),
+            is_favorite=d.get("is_favorite", None),
+            name=d.get("name", None),
+            parent=d.get("parent", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+        )
 
 
 @dataclass
@@ -1601,50 +1936,70 @@ class DataSource:
     def as_dict(self) -> dict:
         """Serializes the DataSource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.pause_reason is not None: body['pause_reason'] = self.pause_reason
-        if self.paused is not None: body['paused'] = self.paused
-        if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit
-        if self.syntax is not None: body['syntax'] = self.syntax
-        if self.type is not None: body['type'] = self.type
-        if self.view_only is not None: body['view_only'] = self.view_only
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pause_reason is not None:
+            body["pause_reason"] = self.pause_reason
+        if self.paused is not None:
+            body["paused"] = self.paused
+        if self.supports_auto_limit is not None:
+            body["supports_auto_limit"] = self.supports_auto_limit
+        if self.syntax is not None:
+            body["syntax"] = self.syntax
+        if self.type is not None:
+            body["type"] = self.type
+        if self.view_only is not None:
+            body["view_only"] = self.view_only
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DataSource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.pause_reason is not None: body['pause_reason'] = self.pause_reason
-        if self.paused is not None: body['paused'] = self.paused
-        if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit
-        if self.syntax is not None: body['syntax'] = self.syntax
-        if self.type is not None: body['type'] = self.type
-        if self.view_only is not None: body['view_only'] = self.view_only
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pause_reason is not None:
+            body["pause_reason"] = self.pause_reason
+        if self.paused is not None:
+            body["paused"] = self.paused
+        if self.supports_auto_limit is not None:
+            body["supports_auto_limit"] = self.supports_auto_limit
+        if self.syntax is not None:
+            body["syntax"] = self.syntax
+        if self.type is not None:
+            body["type"] = self.type
+        if self.view_only is not None:
+            body["view_only"] = self.view_only
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataSource:
         """Deserializes the DataSource from a dictionary."""
-        return cls(id=d.get('id', None),
-                   name=d.get('name', None),
-                   pause_reason=d.get('pause_reason', None),
-                   paused=d.get('paused', None),
-                   supports_auto_limit=d.get('supports_auto_limit', None),
-                   syntax=d.get('syntax', None),
-                   type=d.get('type', None),
-                   view_only=d.get('view_only', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            id=d.get("id", None),
+            name=d.get("name", None),
+            pause_reason=d.get("pause_reason", None),
+            paused=d.get("paused", None),
+            supports_auto_limit=d.get("supports_auto_limit", None),
+            syntax=d.get("syntax", None),
+            type=d.get("type", None),
+            view_only=d.get("view_only", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 class DatePrecision(Enum):
 
-    DAY_PRECISION = 'DAY_PRECISION'
-    MINUTE_PRECISION = 'MINUTE_PRECISION'
-    SECOND_PRECISION = 'SECOND_PRECISION'
+    DAY_PRECISION = "DAY_PRECISION"
+    MINUTE_PRECISION = "MINUTE_PRECISION"
+    SECOND_PRECISION = "SECOND_PRECISION"
 
 
 @dataclass
@@ -1656,21 +2011,25 @@ class DateRange:
     def as_dict(self) -> dict:
         """Serializes the DateRange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end is not None: body['end'] = self.end
-        if self.start is not None: body['start'] = self.start
+        if self.end is not None:
+            body["end"] = self.end
+        if self.start is not None:
+            body["start"] = self.start
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DateRange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end is not None: body['end'] = self.end
-        if self.start is not None: body['start'] = self.start
+        if self.end is not None:
+            body["end"] = self.end
+        if self.start is not None:
+            body["start"] = self.start
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRange:
         """Deserializes the DateRange from a dictionary."""
-        return cls(end=d.get('end', None), start=d.get('start', None))
+        return cls(end=d.get("end", None), start=d.get("start", None))
 
 
 @dataclass
@@ -1690,52 +2049,59 @@ class DateRangeValue:
     def as_dict(self) -> dict:
         """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
+        if self.date_range_value:
+            body["date_range_value"] = self.date_range_value.as_dict()
         if self.dynamic_date_range_value is not None:
-            body['dynamic_date_range_value'] = self.dynamic_date_range_value.value
-        if self.precision is not None: body['precision'] = self.precision.value
-        if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
+            body["dynamic_date_range_value"] = self.dynamic_date_range_value.value
+        if self.precision is not None:
+            body["precision"] = self.precision.value
+        if self.start_day_of_week is not None:
+            body["start_day_of_week"] = self.start_day_of_week
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.date_range_value: body['date_range_value'] = self.date_range_value
+        if self.date_range_value:
+            body["date_range_value"] = self.date_range_value
         if self.dynamic_date_range_value is not None:
-            body['dynamic_date_range_value'] = self.dynamic_date_range_value
-        if self.precision is not None: body['precision'] = self.precision
-        if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
+            body["dynamic_date_range_value"] = self.dynamic_date_range_value
+        if self.precision is not None:
+            body["precision"] = self.precision
+        if self.start_day_of_week is not None:
+            body["start_day_of_week"] = self.start_day_of_week
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
         """Deserializes the DateRangeValue from a dictionary."""
-        return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange),
-                   dynamic_date_range_value=_enum(d, 'dynamic_date_range_value',
-                                                  DateRangeValueDynamicDateRange),
-                   precision=_enum(d, 'precision', DatePrecision),
-                   start_day_of_week=d.get('start_day_of_week', None))
+        return cls(
+            date_range_value=_from_dict(d, "date_range_value", DateRange),
+            dynamic_date_range_value=_enum(d, "dynamic_date_range_value", DateRangeValueDynamicDateRange),
+            precision=_enum(d, "precision", DatePrecision),
+            start_day_of_week=d.get("start_day_of_week", None),
+        )
 
 
 class DateRangeValueDynamicDateRange(Enum):
 
-    LAST_12_MONTHS = 'LAST_12_MONTHS'
-    LAST_14_DAYS = 'LAST_14_DAYS'
-    LAST_24_HOURS = 'LAST_24_HOURS'
-    LAST_30_DAYS = 'LAST_30_DAYS'
-    LAST_60_DAYS = 'LAST_60_DAYS'
-    LAST_7_DAYS = 'LAST_7_DAYS'
-    LAST_8_HOURS = 'LAST_8_HOURS'
-    LAST_90_DAYS = 'LAST_90_DAYS'
-    LAST_HOUR = 'LAST_HOUR'
-    LAST_MONTH = 'LAST_MONTH'
-    LAST_WEEK = 'LAST_WEEK'
-    LAST_YEAR = 'LAST_YEAR'
-    THIS_MONTH = 'THIS_MONTH'
-    THIS_WEEK = 'THIS_WEEK'
-    THIS_YEAR = 'THIS_YEAR'
-    TODAY = 'TODAY'
-    YESTERDAY = 'YESTERDAY'
+    LAST_12_MONTHS = "LAST_12_MONTHS"
+    LAST_14_DAYS = "LAST_14_DAYS"
+    LAST_24_HOURS = "LAST_24_HOURS"
+    LAST_30_DAYS = "LAST_30_DAYS"
+    LAST_60_DAYS = "LAST_60_DAYS"
+    LAST_7_DAYS = "LAST_7_DAYS"
+    LAST_8_HOURS = "LAST_8_HOURS"
+    LAST_90_DAYS = "LAST_90_DAYS"
+    LAST_HOUR = "LAST_HOUR"
+    LAST_MONTH = "LAST_MONTH"
+    LAST_WEEK = "LAST_WEEK"
+    LAST_YEAR = "LAST_YEAR"
+    THIS_MONTH = "THIS_MONTH"
+    THIS_WEEK = "THIS_WEEK"
+    THIS_YEAR = "THIS_YEAR"
+    TODAY = "TODAY"
+    YESTERDAY = "YESTERDAY"
 
 
 @dataclass
@@ -1753,31 +2119,39 @@ class DateValue:
     def as_dict(self) -> dict:
         """Serializes the DateValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.date_value is not None: body['date_value'] = self.date_value
-        if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value
-        if self.precision is not None: body['precision'] = self.precision.value
+        if self.date_value is not None:
+            body["date_value"] = self.date_value
+        if self.dynamic_date_value is not None:
+            body["dynamic_date_value"] = self.dynamic_date_value.value
+        if self.precision is not None:
+            body["precision"] = self.precision.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DateValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.date_value is not None: body['date_value'] = self.date_value
-        if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value
-        if self.precision is not None: body['precision'] = self.precision
+        if self.date_value is not None:
+            body["date_value"] = self.date_value
+        if self.dynamic_date_value is not None:
+            body["dynamic_date_value"] = self.dynamic_date_value
+        if self.precision is not None:
+            body["precision"] = self.precision
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateValue:
         """Deserializes the DateValue from a dictionary."""
-        return cls(date_value=d.get('date_value', None),
-                   dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate),
-                   precision=_enum(d, 'precision', DatePrecision))
+        return cls(
+            date_value=d.get("date_value", None),
+            dynamic_date_value=_enum(d, "dynamic_date_value", DateValueDynamicDate),
+            precision=_enum(d, "precision", DatePrecision),
+        )
 
 
 class DateValueDynamicDate(Enum):
 
-    NOW = 'NOW'
-    YESTERDAY = 'YESTERDAY'
+    NOW = "NOW"
+    YESTERDAY = "YESTERDAY"
 
 
 @dataclass
@@ -1820,8 +2194,8 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse:
 
 class Disposition(Enum):
 
-    EXTERNAL_LINKS = 'EXTERNAL_LINKS'
-    INLINE = 'INLINE'
+    EXTERNAL_LINKS = "EXTERNAL_LINKS"
+    INLINE = "INLINE"
 
 
 @dataclass
@@ -1844,31 +2218,43 @@ class EditAlert:
     def as_dict(self) -> dict:
         """Serializes the EditAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditAlert:
         """Deserializes the EditAlert from a dictionary."""
-        return cls(alert_id=d.get('alert_id', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', AlertOptions),
-                   query_id=d.get('query_id', None),
-                   rearm=d.get('rearm', None))
+        return cls(
+            alert_id=d.get("alert_id", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", AlertOptions),
+            query_id=d.get("query_id", None),
+            rearm=d.get("rearm", None),
+        )
 
 
 @dataclass
@@ -1947,70 +2333,97 @@ class EditWarehouseRequest:
     def as_dict(self) -> dict:
         """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
         if self.spot_instance_policy is not None:
-            body['spot_instance_policy'] = self.spot_instance_policy.value
-        if self.tags: body['tags'] = self.tags.as_dict()
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+            body["spot_instance_policy"] = self.spot_instance_policy.value
+        if self.tags:
+            body["tags"] = self.tags.as_dict()
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditWarehouseRequest:
         """Deserializes the EditWarehouseRequest from a dictionary."""
-        return cls(auto_stop_mins=d.get('auto_stop_mins', None),
-                   channel=_from_dict(d, 'channel', Channel),
-                   cluster_size=d.get('cluster_size', None),
-                   creator_name=d.get('creator_name', None),
-                   enable_photon=d.get('enable_photon', None),
-                   enable_serverless_compute=d.get('enable_serverless_compute', None),
-                   id=d.get('id', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   max_num_clusters=d.get('max_num_clusters', None),
-                   min_num_clusters=d.get('min_num_clusters', None),
-                   name=d.get('name', None),
-                   spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy),
-                   tags=_from_dict(d, 'tags', EndpointTags),
-                   warehouse_type=_enum(d, 'warehouse_type', EditWarehouseRequestWarehouseType))
+        return cls(
+            auto_stop_mins=d.get("auto_stop_mins", None),
+            channel=_from_dict(d, "channel", Channel),
+            cluster_size=d.get("cluster_size", None),
+            creator_name=d.get("creator_name", None),
+            enable_photon=d.get("enable_photon", None),
+            enable_serverless_compute=d.get("enable_serverless_compute", None),
+            id=d.get("id", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            max_num_clusters=d.get("max_num_clusters", None),
+            min_num_clusters=d.get("min_num_clusters", None),
+            name=d.get("name", None),
+            spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy),
+            tags=_from_dict(d, "tags", EndpointTags),
+            warehouse_type=_enum(d, "warehouse_type", EditWarehouseRequestWarehouseType),
+        )
 
 
 class EditWarehouseRequestWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
     and also set the field `enable_serverless_compute` to `true`."""
 
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
 
 
 @dataclass
@@ -2062,21 +2475,25 @@ class EndpointConfPair:
     def as_dict(self) -> dict:
         """Serializes the EndpointConfPair into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointConfPair:
         """Deserializes the EndpointConfPair from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -2100,31 +2517,43 @@ class EndpointHealth:
     def as_dict(self) -> dict:
         """Serializes the EndpointHealth into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.details is not None: body['details'] = self.details
-        if self.failure_reason: body['failure_reason'] = self.failure_reason.as_dict()
-        if self.message is not None: body['message'] = self.message
-        if self.status is not None: body['status'] = self.status.value
-        if self.summary is not None: body['summary'] = self.summary
+        if self.details is not None:
+            body["details"] = self.details
+        if self.failure_reason:
+            body["failure_reason"] = self.failure_reason.as_dict()
+        if self.message is not None:
+            body["message"] = self.message
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.summary is not None:
+            body["summary"] = self.summary
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.details is not None: body['details'] = self.details
-        if self.failure_reason: body['failure_reason'] = self.failure_reason
-        if self.message is not None: body['message'] = self.message
-        if self.status is not None: body['status'] = self.status
-        if self.summary is not None: body['summary'] = self.summary
+        if self.details is not None:
+            body["details"] = self.details
+        if self.failure_reason:
+            body["failure_reason"] = self.failure_reason
+        if self.message is not None:
+            body["message"] = self.message
+        if self.status is not None:
+            body["status"] = self.status
+        if self.summary is not None:
+            body["summary"] = self.summary
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointHealth:
         """Deserializes the EndpointHealth from a dictionary."""
-        return cls(details=d.get('details', None),
-                   failure_reason=_from_dict(d, 'failure_reason', TerminationReason),
-                   message=d.get('message', None),
-                   status=_enum(d, 'status', Status),
-                   summary=d.get('summary', None))
+        return cls(
+            details=d.get("details", None),
+            failure_reason=_from_dict(d, "failure_reason", TerminationReason),
+            message=d.get("message", None),
+            status=_enum(d, "status", Status),
+            summary=d.get("summary", None),
+        )
 
 
 @dataclass
@@ -2221,88 +2650,127 @@ class EndpointInfo:
     def as_dict(self) -> dict:
         """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict()
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params.as_dict()
         if self.spot_instance_policy is not None:
-            body['spot_instance_policy'] = self.spot_instance_policy.value
-        if self.state is not None: body['state'] = self.state.value
-        if self.tags: body['tags'] = self.tags.as_dict()
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+            body["spot_instance_policy"] = self.spot_instance_policy.value
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.tags:
+            body["tags"] = self.tags.as_dict()
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
-        if self.state is not None: body['state'] = self.state
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy
+        if self.state is not None:
+            body["state"] = self.state
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
-        return cls(auto_stop_mins=d.get('auto_stop_mins', None),
-                   channel=_from_dict(d, 'channel', Channel),
-                   cluster_size=d.get('cluster_size', None),
-                   creator_name=d.get('creator_name', None),
-                   enable_photon=d.get('enable_photon', None),
-                   enable_serverless_compute=d.get('enable_serverless_compute', None),
-                   health=_from_dict(d, 'health', EndpointHealth),
-                   id=d.get('id', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   jdbc_url=d.get('jdbc_url', None),
-                   max_num_clusters=d.get('max_num_clusters', None),
-                   min_num_clusters=d.get('min_num_clusters', None),
-                   name=d.get('name', None),
-                   num_active_sessions=d.get('num_active_sessions', None),
-                   num_clusters=d.get('num_clusters', None),
-                   odbc_params=_from_dict(d, 'odbc_params', OdbcParams),
-                   spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy),
-                   state=_enum(d, 'state', State),
-                   tags=_from_dict(d, 'tags', EndpointTags),
-                   warehouse_type=_enum(d, 'warehouse_type', EndpointInfoWarehouseType))
+        return cls(
+            auto_stop_mins=d.get("auto_stop_mins", None),
+            channel=_from_dict(d, "channel", Channel),
+            cluster_size=d.get("cluster_size", None),
+            creator_name=d.get("creator_name", None),
+            enable_photon=d.get("enable_photon", None),
+            enable_serverless_compute=d.get("enable_serverless_compute", None),
+            health=_from_dict(d, "health", EndpointHealth),
+            id=d.get("id", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            jdbc_url=d.get("jdbc_url", None),
+            max_num_clusters=d.get("max_num_clusters", None),
+            min_num_clusters=d.get("min_num_clusters", None),
+            name=d.get("name", None),
+            num_active_sessions=d.get("num_active_sessions", None),
+            num_clusters=d.get("num_clusters", None),
+            odbc_params=_from_dict(d, "odbc_params", OdbcParams),
+            spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy),
+            state=_enum(d, "state", State),
+            tags=_from_dict(d, "tags", EndpointTags),
+            warehouse_type=_enum(d, "warehouse_type", EndpointInfoWarehouseType),
+        )
 
 
 class EndpointInfoWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
     and also set the field `enable_serverless_compute` to `true`."""
 
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
 
 
 @dataclass
@@ -2314,21 +2782,25 @@ class EndpointTagPair:
     def as_dict(self) -> dict:
         """Serializes the EndpointTagPair into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTagPair:
         """Deserializes the EndpointTagPair from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -2338,19 +2810,21 @@ class EndpointTags:
     def as_dict(self) -> dict:
         """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
         """Deserializes the EndpointTags from a dictionary."""
-        return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair))
+        return cls(custom_tags=_repeated_dict(d, "custom_tags", EndpointTagPair))
 
 
 @dataclass
@@ -2367,25 +2841,33 @@ class EnumValue:
     def as_dict(self) -> dict:
         """Serializes the EnumValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enum_options is not None: body['enum_options'] = self.enum_options
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
-        if self.values: body['values'] = [v for v in self.values]
+        if self.enum_options is not None:
+            body["enum_options"] = self.enum_options
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options.as_dict()
+        if self.values:
+            body["values"] = [v for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EnumValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enum_options is not None: body['enum_options'] = self.enum_options
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
-        if self.values: body['values'] = self.values
+        if self.enum_options is not None:
+            body["enum_options"] = self.enum_options
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnumValue:
         """Deserializes the EnumValue from a dictionary."""
-        return cls(enum_options=d.get('enum_options', None),
-                   multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
-                   values=d.get('values', None))
+        return cls(
+            enum_options=d.get("enum_options", None),
+            multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions),
+            values=d.get("values", None),
+        )
 
 
 @dataclass
@@ -2508,49 +2990,73 @@ class ExecuteStatementRequest:
     def as_dict(self) -> dict:
         """Serializes the ExecuteStatementRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.byte_limit is not None: body['byte_limit'] = self.byte_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.disposition is not None: body['disposition'] = self.disposition.value
-        if self.format is not None: body['format'] = self.format.value
-        if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout.value
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.row_limit is not None: body['row_limit'] = self.row_limit
-        if self.schema is not None: body['schema'] = self.schema
-        if self.statement is not None: body['statement'] = self.statement
-        if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.byte_limit is not None:
+            body["byte_limit"] = self.byte_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.disposition is not None:
+            body["disposition"] = self.disposition.value
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.on_wait_timeout is not None:
+            body["on_wait_timeout"] = self.on_wait_timeout.value
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.row_limit is not None:
+            body["row_limit"] = self.row_limit
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.statement is not None:
+            body["statement"] = self.statement
+        if self.wait_timeout is not None:
+            body["wait_timeout"] = self.wait_timeout
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.byte_limit is not None: body['byte_limit'] = self.byte_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.disposition is not None: body['disposition'] = self.disposition
-        if self.format is not None: body['format'] = self.format
-        if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout
-        if self.parameters: body['parameters'] = self.parameters
-        if self.row_limit is not None: body['row_limit'] = self.row_limit
-        if self.schema is not None: body['schema'] = self.schema
-        if self.statement is not None: body['statement'] = self.statement
-        if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.byte_limit is not None:
+            body["byte_limit"] = self.byte_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.disposition is not None:
+            body["disposition"] = self.disposition
+        if self.format is not None:
+            body["format"] = self.format
+        if self.on_wait_timeout is not None:
+            body["on_wait_timeout"] = self.on_wait_timeout
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.row_limit is not None:
+            body["row_limit"] = self.row_limit
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.statement is not None:
+            body["statement"] = self.statement
+        if self.wait_timeout is not None:
+            body["wait_timeout"] = self.wait_timeout
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementRequest:
         """Deserializes the ExecuteStatementRequest from a dictionary."""
-        return cls(byte_limit=d.get('byte_limit', None),
-                   catalog=d.get('catalog', None),
-                   disposition=_enum(d, 'disposition', Disposition),
-                   format=_enum(d, 'format', Format),
-                   on_wait_timeout=_enum(d, 'on_wait_timeout', ExecuteStatementRequestOnWaitTimeout),
-                   parameters=_repeated_dict(d, 'parameters', StatementParameterListItem),
-                   row_limit=d.get('row_limit', None),
-                   schema=d.get('schema', None),
-                   statement=d.get('statement', None),
-                   wait_timeout=d.get('wait_timeout', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            byte_limit=d.get("byte_limit", None),
+            catalog=d.get("catalog", None),
+            disposition=_enum(d, "disposition", Disposition),
+            format=_enum(d, "format", Format),
+            on_wait_timeout=_enum(d, "on_wait_timeout", ExecuteStatementRequestOnWaitTimeout),
+            parameters=_repeated_dict(d, "parameters", StatementParameterListItem),
+            row_limit=d.get("row_limit", None),
+            schema=d.get("schema", None),
+            statement=d.get("statement", None),
+            wait_timeout=d.get("wait_timeout", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 class ExecuteStatementRequestOnWaitTimeout(Enum):
@@ -2561,8 +3067,8 @@ class ExecuteStatementRequestOnWaitTimeout(Enum):
     :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is
     canceled and the call returns with a `CANCELED` state."""
 
-    CANCEL = 'CANCEL'
-    CONTINUE = 'CONTINUE'
+    CANCEL = "CANCEL"
+    CONTINUE = "CONTINUE"
 
 
 @dataclass
@@ -2605,52 +3111,70 @@ class ExternalLink:
     def as_dict(self) -> dict:
         """Serializes the ExternalLink into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.expiration is not None: body['expiration'] = self.expiration
-        if self.external_link is not None: body['external_link'] = self.external_link
-        if self.http_headers: body['http_headers'] = self.http_headers
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.expiration is not None:
+            body["expiration"] = self.expiration
+        if self.external_link is not None:
+            body["external_link"] = self.external_link
+        if self.http_headers:
+            body["http_headers"] = self.http_headers
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
         if self.next_chunk_internal_link is not None:
-            body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalLink into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.expiration is not None: body['expiration'] = self.expiration
-        if self.external_link is not None: body['external_link'] = self.external_link
-        if self.http_headers: body['http_headers'] = self.http_headers
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.expiration is not None:
+            body["expiration"] = self.expiration
+        if self.external_link is not None:
+            body["external_link"] = self.external_link
+        if self.http_headers:
+            body["http_headers"] = self.http_headers
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
         if self.next_chunk_internal_link is not None:
-            body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLink:
         """Deserializes the ExternalLink from a dictionary."""
-        return cls(byte_count=d.get('byte_count', None),
-                   chunk_index=d.get('chunk_index', None),
-                   expiration=d.get('expiration', None),
-                   external_link=d.get('external_link', None),
-                   http_headers=d.get('http_headers', None),
-                   next_chunk_index=d.get('next_chunk_index', None),
-                   next_chunk_internal_link=d.get('next_chunk_internal_link', None),
-                   row_count=d.get('row_count', None),
-                   row_offset=d.get('row_offset', None))
+        return cls(
+            byte_count=d.get("byte_count", None),
+            chunk_index=d.get("chunk_index", None),
+            expiration=d.get("expiration", None),
+            external_link=d.get("external_link", None),
+            http_headers=d.get("http_headers", None),
+            next_chunk_index=d.get("next_chunk_index", None),
+            next_chunk_internal_link=d.get("next_chunk_internal_link", None),
+            row_count=d.get("row_count", None),
+            row_offset=d.get("row_offset", None),
+        )
 
 
 class Format(Enum):
 
-    ARROW_STREAM = 'ARROW_STREAM'
-    CSV = 'CSV'
-    JSON_ARRAY = 'JSON_ARRAY'
+    ARROW_STREAM = "ARROW_STREAM"
+    CSV = "CSV"
+    JSON_ARRAY = "JSON_ARRAY"
 
 
 @dataclass
@@ -2667,25 +3191,32 @@ def as_dict(self) -> dict:
         """Serializes the GetResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type.value
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetResponse:
         """Deserializes the GetResponse from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl),
-                   object_id=d.get('object_id', None),
-                   object_type=_enum(d, 'object_type', ObjectType))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AccessControl),
+            object_id=d.get("object_id", None),
+            object_type=_enum(d, "object_type", ObjectType),
+        )
 
 
 @dataclass
@@ -2696,19 +3227,21 @@ class GetWarehousePermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetWarehousePermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehousePermissionLevelsResponse:
         """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', WarehousePermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", WarehousePermissionsDescription))
 
 
 @dataclass
@@ -2805,88 +3338,127 @@ class GetWarehouseResponse:
     def as_dict(self) -> dict:
         """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict()
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params.as_dict()
         if self.spot_instance_policy is not None:
-            body['spot_instance_policy'] = self.spot_instance_policy.value
-        if self.state is not None: body['state'] = self.state.value
-        if self.tags: body['tags'] = self.tags.as_dict()
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+            body["spot_instance_policy"] = self.spot_instance_policy.value
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.tags:
+            body["tags"] = self.tags.as_dict()
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
         if self.enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
-        if self.state is not None: body['state'] = self.state
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy
+        if self.state is not None:
+            body["state"] = self.state
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehouseResponse:
         """Deserializes the GetWarehouseResponse from a dictionary."""
-        return cls(auto_stop_mins=d.get('auto_stop_mins', None),
-                   channel=_from_dict(d, 'channel', Channel),
-                   cluster_size=d.get('cluster_size', None),
-                   creator_name=d.get('creator_name', None),
-                   enable_photon=d.get('enable_photon', None),
-                   enable_serverless_compute=d.get('enable_serverless_compute', None),
-                   health=_from_dict(d, 'health', EndpointHealth),
-                   id=d.get('id', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   jdbc_url=d.get('jdbc_url', None),
-                   max_num_clusters=d.get('max_num_clusters', None),
-                   min_num_clusters=d.get('min_num_clusters', None),
-                   name=d.get('name', None),
-                   num_active_sessions=d.get('num_active_sessions', None),
-                   num_clusters=d.get('num_clusters', None),
-                   odbc_params=_from_dict(d, 'odbc_params', OdbcParams),
-                   spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy),
-                   state=_enum(d, 'state', State),
-                   tags=_from_dict(d, 'tags', EndpointTags),
-                   warehouse_type=_enum(d, 'warehouse_type', GetWarehouseResponseWarehouseType))
+        return cls(
+            auto_stop_mins=d.get("auto_stop_mins", None),
+            channel=_from_dict(d, "channel", Channel),
+            cluster_size=d.get("cluster_size", None),
+            creator_name=d.get("creator_name", None),
+            enable_photon=d.get("enable_photon", None),
+            enable_serverless_compute=d.get("enable_serverless_compute", None),
+            health=_from_dict(d, "health", EndpointHealth),
+            id=d.get("id", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            jdbc_url=d.get("jdbc_url", None),
+            max_num_clusters=d.get("max_num_clusters", None),
+            min_num_clusters=d.get("min_num_clusters", None),
+            name=d.get("name", None),
+            num_active_sessions=d.get("num_active_sessions", None),
+            num_clusters=d.get("num_clusters", None),
+            odbc_params=_from_dict(d, "odbc_params", OdbcParams),
+            spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy),
+            state=_enum(d, "state", State),
+            tags=_from_dict(d, "tags", EndpointTags),
+            warehouse_type=_enum(d, "warehouse_type", GetWarehouseResponseWarehouseType),
+        )
 
 
 class GetWarehouseResponseWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
     and also set the field `enable_serverless_compute` to `true`."""
 
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
 
 
 @dataclass
@@ -2926,59 +3498,75 @@ class GetWorkspaceWarehouseConfigResponse:
     def as_dict(self) -> dict:
         """Serializes the GetWorkspaceWarehouseConfigResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.config_param: body['config_param'] = self.config_param.as_dict()
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.config_param:
+            body["config_param"] = self.config_param.as_dict()
         if self.data_access_config:
-            body['data_access_config'] = [v.as_dict() for v in self.data_access_config]
+            body["data_access_config"] = [v.as_dict() for v in self.data_access_config]
         if self.enabled_warehouse_types:
-            body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types]
-        if self.global_param: body['global_param'] = self.global_param.as_dict()
+            body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types]
+        if self.global_param:
+            body["global_param"] = self.global_param.as_dict()
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy.value
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy.value
         if self.sql_configuration_parameters:
-            body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.channel: body['channel'] = self.channel
-        if self.config_param: body['config_param'] = self.config_param
-        if self.data_access_config: body['data_access_config'] = self.data_access_config
-        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
-        if self.global_param: body['global_param'] = self.global_param
+        if self.channel:
+            body["channel"] = self.channel
+        if self.config_param:
+            body["config_param"] = self.config_param
+        if self.data_access_config:
+            body["data_access_config"] = self.data_access_config
+        if self.enabled_warehouse_types:
+            body["enabled_warehouse_types"] = self.enabled_warehouse_types
+        if self.global_param:
+            body["global_param"] = self.global_param
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy
         if self.sql_configuration_parameters:
-            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceWarehouseConfigResponse:
         """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary."""
-        return cls(channel=_from_dict(d, 'channel', Channel),
-                   config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs),
-                   data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair),
-                   enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair),
-                   global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs),
-                   google_service_account=d.get('google_service_account', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   security_policy=_enum(d, 'security_policy',
-                                         GetWorkspaceWarehouseConfigResponseSecurityPolicy),
-                   sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters',
-                                                           RepeatedEndpointConfPairs))
+        return cls(
+            channel=_from_dict(d, "channel", Channel),
+            config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs),
+            data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair),
+            enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair),
+            global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs),
+            google_service_account=d.get("google_service_account", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            security_policy=_enum(
+                d,
+                "security_policy",
+                GetWorkspaceWarehouseConfigResponseSecurityPolicy,
+            ),
+            sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs),
+        )
 
 
 class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum):
     """Security policy for warehouses"""
 
-    DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL'
-    NONE = 'NONE'
-    PASSTHROUGH = 'PASSTHROUGH'
+    DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL"
+    NONE = "NONE"
+    PASSTHROUGH = "PASSTHROUGH"
 
 
 @dataclass
@@ -3019,58 +3607,83 @@ class LegacyAlert:
     def as_dict(self) -> dict:
         """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query: body['query'] = self.query.as_dict()
-        if self.rearm is not None: body['rearm'] = self.rearm
-        if self.state is not None: body['state'] = self.state.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_triggered_at is not None:
+            body["last_triggered_at"] = self.last_triggered_at
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query: body['query'] = self.query
-        if self.rearm is not None: body['rearm'] = self.rearm
-        if self.state is not None: body['state'] = self.state
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_triggered_at is not None:
+            body["last_triggered_at"] = self.last_triggered_at
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query:
+            body["query"] = self.query
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
+        if self.state is not None:
+            body["state"] = self.state
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
         """Deserializes the LegacyAlert from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   id=d.get('id', None),
-                   last_triggered_at=d.get('last_triggered_at', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', AlertOptions),
-                   parent=d.get('parent', None),
-                   query=_from_dict(d, 'query', AlertQuery),
-                   rearm=d.get('rearm', None),
-                   state=_enum(d, 'state', LegacyAlertState),
-                   updated_at=d.get('updated_at', None),
-                   user=_from_dict(d, 'user', User))
+        return cls(
+            created_at=d.get("created_at", None),
+            id=d.get("id", None),
+            last_triggered_at=d.get("last_triggered_at", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", AlertOptions),
+            parent=d.get("parent", None),
+            query=_from_dict(d, "query", AlertQuery),
+            rearm=d.get("rearm", None),
+            state=_enum(d, "state", LegacyAlertState),
+            updated_at=d.get("updated_at", None),
+            user=_from_dict(d, "user", User),
+        )
 
 
 class LegacyAlertState(Enum):
     """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated
-    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions)."""
+    and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).
+    """
 
-    OK = 'ok'
-    TRIGGERED = 'triggered'
-    UNKNOWN = 'unknown'
+    OK = "ok"
+    TRIGGERED = "triggered"
+    UNKNOWN = "unknown"
 
 
 @dataclass
@@ -3157,88 +3770,138 @@ class LegacyQuery:
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.last_modified_by:
+            body["last_modified_by"] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None:
+            body["last_modified_by_id"] = self.last_modified_by_id
+        if self.latest_query_data_id is not None:
+            body["latest_query_data_id"] = self.latest_query_data_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier.value
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_hash is not None:
+            body["query_hash"] = self.query_hash
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user.as_dict()
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.visualizations:
+            body["visualizations"] = [v.as_dict() for v in self.visualizations]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = self.visualizations
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.last_modified_by:
+            body["last_modified_by"] = self.last_modified_by
+        if self.last_modified_by_id is not None:
+            body["last_modified_by_id"] = self.last_modified_by_id
+        if self.latest_query_data_id is not None:
+            body["latest_query_data_id"] = self.latest_query_data_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_hash is not None:
+            body["query_hash"] = self.query_hash
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.visualizations:
+            body["visualizations"] = self.visualizations
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
         """Deserializes the LegacyQuery from a dictionary."""
-        return cls(can_edit=d.get('can_edit', None),
-                   created_at=d.get('created_at', None),
-                   data_source_id=d.get('data_source_id', None),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_favorite=d.get('is_favorite', None),
-                   is_safe=d.get('is_safe', None),
-                   last_modified_by=_from_dict(d, 'last_modified_by', User),
-                   last_modified_by_id=d.get('last_modified_by_id', None),
-                   latest_query_data_id=d.get('latest_query_data_id', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', QueryOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   query=d.get('query', None),
-                   query_hash=d.get('query_hash', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None),
-                   updated_at=d.get('updated_at', None),
-                   user=_from_dict(d, 'user', User),
-                   user_id=d.get('user_id', None),
-                   visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization))
+        return cls(
+            can_edit=d.get("can_edit", None),
+            created_at=d.get("created_at", None),
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            is_archived=d.get("is_archived", None),
+            is_draft=d.get("is_draft", None),
+            is_favorite=d.get("is_favorite", None),
+            is_safe=d.get("is_safe", None),
+            last_modified_by=_from_dict(d, "last_modified_by", User),
+            last_modified_by_id=d.get("last_modified_by_id", None),
+            latest_query_data_id=d.get("latest_query_data_id", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", QueryOptions),
+            parent=d.get("parent", None),
+            permission_tier=_enum(d, "permission_tier", PermissionLevel),
+            query=d.get("query", None),
+            query_hash=d.get("query_hash", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+            updated_at=d.get("updated_at", None),
+            user=_from_dict(d, "user", User),
+            user_id=d.get("user_id", None),
+            visualizations=_repeated_dict(d, "visualizations", LegacyVisualization),
+        )
 
 
 @dataclass
@@ -3273,46 +3936,64 @@ class LegacyVisualization:
     def as_dict(self) -> dict:
         """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query: body['query'] = self.query.as_dict()
-        if self.type is not None: body['type'] = self.type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.type is not None:
+            body["type"] = self.type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query: body['query'] = self.query
-        if self.type is not None: body['type'] = self.type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query:
+            body["query"] = self.query
+        if self.type is not None:
+            body["type"] = self.type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
         """Deserializes the LegacyVisualization from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   description=d.get('description', None),
-                   id=d.get('id', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   query=_from_dict(d, 'query', LegacyQuery),
-                   type=d.get('type', None),
-                   updated_at=d.get('updated_at', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            query=_from_dict(d, "query", LegacyQuery),
+            type=d.get("type", None),
+            updated_at=d.get("updated_at", None),
+        )
 
 
 class LifecycleState(Enum):
 
-    ACTIVE = 'ACTIVE'
-    TRASHED = 'TRASHED'
+    ACTIVE = "ACTIVE"
+    TRASHED = "TRASHED"
 
 
 @dataclass
@@ -3324,22 +4005,28 @@ class ListAlertsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
         """Deserializes the ListAlertsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   results=_repeated_dict(d, 'results', ListAlertsResponseAlert))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", ListAlertsResponseAlert),
+        )
 
 
 @dataclass
@@ -3396,64 +4083,94 @@ class ListAlertsResponseAlert:
     def as_dict(self) -> dict:
         """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition: body['condition'] = self.condition.as_dict()
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state.value
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition.as_dict()
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition: body['condition'] = self.condition
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
         """Deserializes the ListAlertsResponseAlert from a dictionary."""
-        return cls(condition=_from_dict(d, 'condition', AlertCondition),
-                   create_time=d.get('create_time', None),
-                   custom_body=d.get('custom_body', None),
-                   custom_subject=d.get('custom_subject', None),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   notify_on_ok=d.get('notify_on_ok', None),
-                   owner_user_name=d.get('owner_user_name', None),
-                   query_id=d.get('query_id', None),
-                   seconds_to_retrigger=d.get('seconds_to_retrigger', None),
-                   state=_enum(d, 'state', AlertState),
-                   trigger_time=d.get('trigger_time', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            condition=_from_dict(d, "condition", AlertCondition),
+            create_time=d.get("create_time", None),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            notify_on_ok=d.get("notify_on_ok", None),
+            owner_user_name=d.get("owner_user_name", None),
+            query_id=d.get("query_id", None),
+            seconds_to_retrigger=d.get("seconds_to_retrigger", None),
+            state=_enum(d, "state", AlertState),
+            trigger_time=d.get("trigger_time", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 class ListOrder(Enum):
 
-    CREATED_AT = 'created_at'
-    NAME = 'name'
+    CREATED_AT = "created_at"
+    NAME = "name"
 
 
 @dataclass
@@ -3469,25 +4186,33 @@ class ListQueriesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListQueriesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.res: body['res'] = [v.as_dict() for v in self.res]
+        if self.has_next_page is not None:
+            body["has_next_page"] = self.has_next_page
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.res:
+            body["res"] = [v.as_dict() for v in self.res]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.res: body['res'] = self.res
+        if self.has_next_page is not None:
+            body["has_next_page"] = self.has_next_page
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.res:
+            body["res"] = self.res
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse:
         """Deserializes the ListQueriesResponse from a dictionary."""
-        return cls(has_next_page=d.get('has_next_page', None),
-                   next_page_token=d.get('next_page_token', None),
-                   res=_repeated_dict(d, 'res', QueryInfo))
+        return cls(
+            has_next_page=d.get("has_next_page", None),
+            next_page_token=d.get("next_page_token", None),
+            res=_repeated_dict(d, "res", QueryInfo),
+        )
 
 
 @dataclass
@@ -3499,22 +4224,28 @@ class ListQueryObjectsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
         """Deserializes the ListQueryObjectsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", ListQueryObjectsResponseQuery),
+        )
 
 
 @dataclass
@@ -3569,66 +4300,98 @@ class ListQueryObjectsResponseQuery:
     def as_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
         """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
-                   catalog=d.get('catalog', None),
-                   create_time=d.get('create_time', None),
-                   description=d.get('description', None),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None),
-                   last_modifier_user_name=d.get('last_modifier_user_name', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
-                   tags=d.get('tags', None),
-                   update_time=d.get('update_time', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            create_time=d.get("create_time", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            last_modifier_user_name=d.get("last_modifier_user_name", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            owner_user_name=d.get("owner_user_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -3648,28 +4411,38 @@ class ListResponse:
     def as_dict(self) -> dict:
         """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = self.results
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
-        return cls(count=d.get('count', None),
-                   page=d.get('page', None),
-                   page_size=d.get('page_size', None),
-                   results=_repeated_dict(d, 'results', Dashboard))
+        return cls(
+            count=d.get("count", None),
+            page=d.get("page", None),
+            page_size=d.get("page_size", None),
+            results=_repeated_dict(d, "results", Dashboard),
+        )
 
 
 @dataclass
@@ -3681,22 +4454,28 @@ class ListVisualizationsForQueryResponse:
     def as_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
         """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   results=_repeated_dict(d, 'results', Visualization))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", Visualization),
+        )
 
 
 @dataclass
@@ -3707,19 +4486,21 @@ class ListWarehousesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
+        if self.warehouses:
+            body["warehouses"] = [v.as_dict() for v in self.warehouses]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.warehouses: body['warehouses'] = self.warehouses
+        if self.warehouses:
+            body["warehouses"] = self.warehouses
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
-        return cls(warehouses=_repeated_dict(d, 'warehouses', EndpointInfo))
+        return cls(warehouses=_repeated_dict(d, "warehouses", EndpointInfo))
 
 
 @dataclass
@@ -3736,25 +4517,33 @@ class MultiValuesOptions:
     def as_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.prefix is not None: body['prefix'] = self.prefix
-        if self.separator is not None: body['separator'] = self.separator
-        if self.suffix is not None: body['suffix'] = self.suffix
+        if self.prefix is not None:
+            body["prefix"] = self.prefix
+        if self.separator is not None:
+            body["separator"] = self.separator
+        if self.suffix is not None:
+            body["suffix"] = self.suffix
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.prefix is not None: body['prefix'] = self.prefix
-        if self.separator is not None: body['separator'] = self.separator
-        if self.suffix is not None: body['suffix'] = self.suffix
+        if self.prefix is not None:
+            body["prefix"] = self.prefix
+        if self.separator is not None:
+            body["separator"] = self.separator
+        if self.suffix is not None:
+            body["suffix"] = self.suffix
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions:
         """Deserializes the MultiValuesOptions from a dictionary."""
-        return cls(prefix=d.get('prefix', None),
-                   separator=d.get('separator', None),
-                   suffix=d.get('suffix', None))
+        return cls(
+            prefix=d.get("prefix", None),
+            separator=d.get("separator", None),
+            suffix=d.get("suffix", None),
+        )
 
 
 @dataclass
@@ -3764,37 +4553,39 @@ class NumericValue:
     def as_dict(self) -> dict:
         """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the NumericValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NumericValue:
         """Deserializes the NumericValue from a dictionary."""
-        return cls(value=d.get('value', None))
+        return cls(value=d.get("value", None))
 
 
 class ObjectType(Enum):
     """A singular noun object type."""
 
-    ALERT = 'alert'
-    DASHBOARD = 'dashboard'
-    DATA_SOURCE = 'data_source'
-    QUERY = 'query'
+    ALERT = "alert"
+    DASHBOARD = "dashboard"
+    DATA_SOURCE = "data_source"
+    QUERY = "query"
 
 
 class ObjectTypePlural(Enum):
     """Always a plural of the object type."""
 
-    ALERTS = 'alerts'
-    DASHBOARDS = 'dashboards'
-    DATA_SOURCES = 'data_sources'
-    QUERIES = 'queries'
+    ALERTS = "alerts"
+    DASHBOARDS = "dashboards"
+    DATA_SOURCES = "data_sources"
+    QUERIES = "queries"
 
 
 @dataclass
@@ -3810,36 +4601,46 @@ class OdbcParams:
     def as_dict(self) -> dict:
         """Serializes the OdbcParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.hostname is not None: body['hostname'] = self.hostname
-        if self.path is not None: body['path'] = self.path
-        if self.port is not None: body['port'] = self.port
-        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.hostname is not None:
+            body["hostname"] = self.hostname
+        if self.path is not None:
+            body["path"] = self.path
+        if self.port is not None:
+            body["port"] = self.port
+        if self.protocol is not None:
+            body["protocol"] = self.protocol
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the OdbcParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.hostname is not None: body['hostname'] = self.hostname
-        if self.path is not None: body['path'] = self.path
-        if self.port is not None: body['port'] = self.port
-        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.hostname is not None:
+            body["hostname"] = self.hostname
+        if self.path is not None:
+            body["path"] = self.path
+        if self.port is not None:
+            body["port"] = self.port
+        if self.protocol is not None:
+            body["protocol"] = self.protocol
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OdbcParams:
         """Deserializes the OdbcParams from a dictionary."""
-        return cls(hostname=d.get('hostname', None),
-                   path=d.get('path', None),
-                   port=d.get('port', None),
-                   protocol=d.get('protocol', None))
+        return cls(
+            hostname=d.get("hostname", None),
+            path=d.get("path", None),
+            port=d.get("port", None),
+            protocol=d.get("protocol", None),
+        )
 
 
 class OwnableObjectType(Enum):
     """The singular form of the type of object which can be owned."""
 
-    ALERT = 'alert'
-    DASHBOARD = 'dashboard'
-    QUERY = 'query'
+    ALERT = "alert"
+    DASHBOARD = "dashboard"
+    QUERY = "query"
 
 
 @dataclass
@@ -3870,68 +4671,84 @@ class Parameter:
     def as_dict(self) -> dict:
         """Serializes the Parameter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enum_options is not None: body['enumOptions'] = self.enum_options
-        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.query_id is not None: body['queryId'] = self.query_id
-        if self.title is not None: body['title'] = self.title
-        if self.type is not None: body['type'] = self.type.value
-        if self.value: body['value'] = self.value
+        if self.enum_options is not None:
+            body["enumOptions"] = self.enum_options
+        if self.multi_values_options:
+            body["multiValuesOptions"] = self.multi_values_options.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.query_id is not None:
+            body["queryId"] = self.query_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.type is not None:
+            body["type"] = self.type.value
+        if self.value:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Parameter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enum_options is not None: body['enumOptions'] = self.enum_options
-        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options
-        if self.name is not None: body['name'] = self.name
-        if self.query_id is not None: body['queryId'] = self.query_id
-        if self.title is not None: body['title'] = self.title
-        if self.type is not None: body['type'] = self.type
-        if self.value: body['value'] = self.value
+        if self.enum_options is not None:
+            body["enumOptions"] = self.enum_options
+        if self.multi_values_options:
+            body["multiValuesOptions"] = self.multi_values_options
+        if self.name is not None:
+            body["name"] = self.name
+        if self.query_id is not None:
+            body["queryId"] = self.query_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Parameter:
         """Deserializes the Parameter from a dictionary."""
-        return cls(enum_options=d.get('enumOptions', None),
-                   multi_values_options=_from_dict(d, 'multiValuesOptions', MultiValuesOptions),
-                   name=d.get('name', None),
-                   query_id=d.get('queryId', None),
-                   title=d.get('title', None),
-                   type=_enum(d, 'type', ParameterType),
-                   value=d.get('value', None))
+        return cls(
+            enum_options=d.get("enumOptions", None),
+            multi_values_options=_from_dict(d, "multiValuesOptions", MultiValuesOptions),
+            name=d.get("name", None),
+            query_id=d.get("queryId", None),
+            title=d.get("title", None),
+            type=_enum(d, "type", ParameterType),
+            value=d.get("value", None),
+        )
 
 
 class ParameterType(Enum):
     """Parameters can have several different types."""
 
-    DATETIME = 'datetime'
-    ENUM = 'enum'
-    NUMBER = 'number'
-    QUERY = 'query'
-    TEXT = 'text'
+    DATETIME = "datetime"
+    ENUM = "enum"
+    NUMBER = "number"
+    QUERY = "query"
+    TEXT = "text"
 
 
 class PermissionLevel(Enum):
     """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
     * `CAN_MANAGE`: Can manage the query"""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_RUN = 'CAN_RUN'
-    CAN_VIEW = 'CAN_VIEW'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_RUN = "CAN_RUN"
+    CAN_VIEW = "CAN_VIEW"
 
 
 class PlansState(Enum):
     """Possible Reasons for which we have not saved plans in the database"""
 
-    EMPTY = 'EMPTY'
-    EXISTS = 'EXISTS'
-    IGNORED_LARGE_PLANS_SIZE = 'IGNORED_LARGE_PLANS_SIZE'
-    IGNORED_SMALL_DURATION = 'IGNORED_SMALL_DURATION'
-    IGNORED_SPARK_PLAN_TYPE = 'IGNORED_SPARK_PLAN_TYPE'
-    UNKNOWN = 'UNKNOWN'
+    EMPTY = "EMPTY"
+    EXISTS = "EXISTS"
+    IGNORED_LARGE_PLANS_SIZE = "IGNORED_LARGE_PLANS_SIZE"
+    IGNORED_SMALL_DURATION = "IGNORED_SMALL_DURATION"
+    IGNORED_SPARK_PLAN_TYPE = "IGNORED_SPARK_PLAN_TYPE"
+    UNKNOWN = "UNKNOWN"
 
 
 @dataclass
@@ -3989,69 +4806,103 @@ class Query:
     def as_dict(self) -> dict:
         """Serializes the Query into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Query into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Query:
         """Deserializes the Query from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
-                   catalog=d.get('catalog', None),
-                   create_time=d.get('create_time', None),
-                   description=d.get('description', None),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None),
-                   last_modifier_user_name=d.get('last_modifier_user_name', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   parent_path=d.get('parent_path', None),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
-                   tags=d.get('tags', None),
-                   update_time=d.get('update_time', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            create_time=d.get("create_time", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            last_modifier_user_name=d.get("last_modifier_user_name", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            owner_user_name=d.get("owner_user_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            parent_path=d.get("parent_path", None),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -4068,25 +4919,33 @@ class QueryBackedValue:
     def as_dict(self) -> dict:
         """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.values: body['values'] = [v for v in self.values]
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options.as_dict()
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.values:
+            body["values"] = [v for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.values: body['values'] = self.values
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
         """Deserializes the QueryBackedValue from a dictionary."""
-        return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions),
-                   query_id=d.get('query_id', None),
-                   values=d.get('values', None))
+        return cls(
+            multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions),
+            query_id=d.get("query_id", None),
+            values=d.get("values", None),
+        )
 
 
 @dataclass
@@ -4122,40 +4981,58 @@ class QueryEditContent:
     def as_dict(self) -> dict:
         """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query is not None: body['query'] = self.query
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query is not None: body['query'] = self.query
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEditContent:
         """Deserializes the QueryEditContent from a dictionary."""
-        return cls(data_source_id=d.get('data_source_id', None),
-                   description=d.get('description', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   query=d.get('query', None),
-                   query_id=d.get('query_id', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None))
+        return cls(
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            query=d.get("query", None),
+            query_id=d.get("query_id", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+        )
 
 
 @dataclass
@@ -4177,31 +5054,43 @@ class QueryFilter:
     def as_dict(self) -> dict:
         """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict()
-        if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
-        if self.statuses: body['statuses'] = [v.value for v in self.statuses]
-        if self.user_ids: body['user_ids'] = [v for v in self.user_ids]
-        if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
+        if self.query_start_time_range:
+            body["query_start_time_range"] = self.query_start_time_range.as_dict()
+        if self.statement_ids:
+            body["statement_ids"] = [v for v in self.statement_ids]
+        if self.statuses:
+            body["statuses"] = [v.value for v in self.statuses]
+        if self.user_ids:
+            body["user_ids"] = [v for v in self.user_ids]
+        if self.warehouse_ids:
+            body["warehouse_ids"] = [v for v in self.warehouse_ids]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range
-        if self.statement_ids: body['statement_ids'] = self.statement_ids
-        if self.statuses: body['statuses'] = self.statuses
-        if self.user_ids: body['user_ids'] = self.user_ids
-        if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids
+        if self.query_start_time_range:
+            body["query_start_time_range"] = self.query_start_time_range
+        if self.statement_ids:
+            body["statement_ids"] = self.statement_ids
+        if self.statuses:
+            body["statuses"] = self.statuses
+        if self.user_ids:
+            body["user_ids"] = self.user_ids
+        if self.warehouse_ids:
+            body["warehouse_ids"] = self.warehouse_ids
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
-        return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange),
-                   statement_ids=d.get('statement_ids', None),
-                   statuses=_repeated_enum(d, 'statuses', QueryStatus),
-                   user_ids=d.get('user_ids', None),
-                   warehouse_ids=d.get('warehouse_ids', None))
+        return cls(
+            query_start_time_range=_from_dict(d, "query_start_time_range", TimeRange),
+            statement_ids=d.get("statement_ids", None),
+            statuses=_repeated_enum(d, "statuses", QueryStatus),
+            user_ids=d.get("user_ids", None),
+            warehouse_ids=d.get("warehouse_ids", None),
+        )
 
 
 @dataclass
@@ -4279,82 +5168,128 @@ class QueryInfo:
     def as_dict(self) -> dict:
         """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.channel_used: body['channel_used'] = self.channel_used.as_dict()
-        if self.duration is not None: body['duration'] = self.duration
-        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
-        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
-        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
-        if self.is_final is not None: body['is_final'] = self.is_final
-        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
-        if self.metrics: body['metrics'] = self.metrics.as_dict()
-        if self.plans_state is not None: body['plans_state'] = self.plans_state.value
-        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
-        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
-        if self.statement_type is not None: body['statement_type'] = self.statement_type.value
-        if self.status is not None: body['status'] = self.status.value
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.user_name is not None: body['user_name'] = self.user_name
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.channel_used:
+            body["channel_used"] = self.channel_used.as_dict()
+        if self.duration is not None:
+            body["duration"] = self.duration
+        if self.endpoint_id is not None:
+            body["endpoint_id"] = self.endpoint_id
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.executed_as_user_id is not None:
+            body["executed_as_user_id"] = self.executed_as_user_id
+        if self.executed_as_user_name is not None:
+            body["executed_as_user_name"] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None:
+            body["execution_end_time_ms"] = self.execution_end_time_ms
+        if self.is_final is not None:
+            body["is_final"] = self.is_final
+        if self.lookup_key is not None:
+            body["lookup_key"] = self.lookup_key
+        if self.metrics:
+            body["metrics"] = self.metrics.as_dict()
+        if self.plans_state is not None:
+            body["plans_state"] = self.plans_state.value
+        if self.query_end_time_ms is not None:
+            body["query_end_time_ms"] = self.query_end_time_ms
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.query_start_time_ms is not None:
+            body["query_start_time_ms"] = self.query_start_time_ms
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.rows_produced is not None:
+            body["rows_produced"] = self.rows_produced
+        if self.spark_ui_url is not None:
+            body["spark_ui_url"] = self.spark_ui_url
+        if self.statement_type is not None:
+            body["statement_type"] = self.statement_type.value
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.channel_used: body['channel_used'] = self.channel_used
-        if self.duration is not None: body['duration'] = self.duration
-        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
-        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
-        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
-        if self.is_final is not None: body['is_final'] = self.is_final
-        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
-        if self.metrics: body['metrics'] = self.metrics
-        if self.plans_state is not None: body['plans_state'] = self.plans_state
-        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
-        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
-        if self.statement_type is not None: body['statement_type'] = self.statement_type
-        if self.status is not None: body['status'] = self.status
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.user_name is not None: body['user_name'] = self.user_name
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.channel_used:
+            body["channel_used"] = self.channel_used
+        if self.duration is not None:
+            body["duration"] = self.duration
+        if self.endpoint_id is not None:
+            body["endpoint_id"] = self.endpoint_id
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.executed_as_user_id is not None:
+            body["executed_as_user_id"] = self.executed_as_user_id
+        if self.executed_as_user_name is not None:
+            body["executed_as_user_name"] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None:
+            body["execution_end_time_ms"] = self.execution_end_time_ms
+        if self.is_final is not None:
+            body["is_final"] = self.is_final
+        if self.lookup_key is not None:
+            body["lookup_key"] = self.lookup_key
+        if self.metrics:
+            body["metrics"] = self.metrics
+        if self.plans_state is not None:
+            body["plans_state"] = self.plans_state
+        if self.query_end_time_ms is not None:
+            body["query_end_time_ms"] = self.query_end_time_ms
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.query_start_time_ms is not None:
+            body["query_start_time_ms"] = self.query_start_time_ms
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.rows_produced is not None:
+            body["rows_produced"] = self.rows_produced
+        if self.spark_ui_url is not None:
+            body["spark_ui_url"] = self.spark_ui_url
+        if self.statement_type is not None:
+            body["statement_type"] = self.statement_type
+        if self.status is not None:
+            body["status"] = self.status
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
-        return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo),
-                   duration=d.get('duration', None),
-                   endpoint_id=d.get('endpoint_id', None),
-                   error_message=d.get('error_message', None),
-                   executed_as_user_id=d.get('executed_as_user_id', None),
-                   executed_as_user_name=d.get('executed_as_user_name', None),
-                   execution_end_time_ms=d.get('execution_end_time_ms', None),
-                   is_final=d.get('is_final', None),
-                   lookup_key=d.get('lookup_key', None),
-                   metrics=_from_dict(d, 'metrics', QueryMetrics),
-                   plans_state=_enum(d, 'plans_state', PlansState),
-                   query_end_time_ms=d.get('query_end_time_ms', None),
-                   query_id=d.get('query_id', None),
-                   query_start_time_ms=d.get('query_start_time_ms', None),
-                   query_text=d.get('query_text', None),
-                   rows_produced=d.get('rows_produced', None),
-                   spark_ui_url=d.get('spark_ui_url', None),
-                   statement_type=_enum(d, 'statement_type', QueryStatementType),
-                   status=_enum(d, 'status', QueryStatus),
-                   user_id=d.get('user_id', None),
-                   user_name=d.get('user_name', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            channel_used=_from_dict(d, "channel_used", ChannelInfo),
+            duration=d.get("duration", None),
+            endpoint_id=d.get("endpoint_id", None),
+            error_message=d.get("error_message", None),
+            executed_as_user_id=d.get("executed_as_user_id", None),
+            executed_as_user_name=d.get("executed_as_user_name", None),
+            execution_end_time_ms=d.get("execution_end_time_ms", None),
+            is_final=d.get("is_final", None),
+            lookup_key=d.get("lookup_key", None),
+            metrics=_from_dict(d, "metrics", QueryMetrics),
+            plans_state=_enum(d, "plans_state", PlansState),
+            query_end_time_ms=d.get("query_end_time_ms", None),
+            query_id=d.get("query_id", None),
+            query_start_time_ms=d.get("query_start_time_ms", None),
+            query_text=d.get("query_text", None),
+            rows_produced=d.get("rows_produced", None),
+            spark_ui_url=d.get("spark_ui_url", None),
+            statement_type=_enum(d, "statement_type", QueryStatementType),
+            status=_enum(d, "status", QueryStatus),
+            user_id=d.get("user_id", None),
+            user_name=d.get("user_name", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -4374,28 +5309,38 @@ class QueryList:
     def as_dict(self) -> dict:
         """Serializes the QueryList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = self.results
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = self.results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryList:
         """Deserializes the QueryList from a dictionary."""
-        return cls(count=d.get('count', None),
-                   page=d.get('page', None),
-                   page_size=d.get('page_size', None),
-                   results=_repeated_dict(d, 'results', LegacyQuery))
+        return cls(
+            count=d.get("count", None),
+            page=d.get("page", None),
+            page_size=d.get("page_size", None),
+            results=_repeated_dict(d, "results", LegacyQuery),
+        )
 
 
 @dataclass
@@ -4475,88 +5420,128 @@ class QueryMetrics:
     def as_dict(self) -> dict:
         """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
-        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
-        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.compilation_time_ms is not None:
+            body["compilation_time_ms"] = self.compilation_time_ms
+        if self.execution_time_ms is not None:
+            body["execution_time_ms"] = self.execution_time_ms
+        if self.network_sent_bytes is not None:
+            body["network_sent_bytes"] = self.network_sent_bytes
         if self.overloading_queue_start_timestamp is not None:
-            body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
-        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+            body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None:
+            body["photon_total_time_ms"] = self.photon_total_time_ms
         if self.provisioning_queue_start_timestamp is not None:
-            body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
-        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
-        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+            body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None:
+            body["pruned_bytes"] = self.pruned_bytes
+        if self.pruned_files_count is not None:
+            body["pruned_files_count"] = self.pruned_files_count
         if self.query_compilation_start_timestamp is not None:
-            body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
-        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
-        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
-        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
-        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
-        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
-        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
-        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
-        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
-        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
-        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
-        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
-        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
-        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
+            body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None:
+            body["read_bytes"] = self.read_bytes
+        if self.read_cache_bytes is not None:
+            body["read_cache_bytes"] = self.read_cache_bytes
+        if self.read_files_count is not None:
+            body["read_files_count"] = self.read_files_count
+        if self.read_partitions_count is not None:
+            body["read_partitions_count"] = self.read_partitions_count
+        if self.read_remote_bytes is not None:
+            body["read_remote_bytes"] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None:
+            body["result_fetch_time_ms"] = self.result_fetch_time_ms
+        if self.result_from_cache is not None:
+            body["result_from_cache"] = self.result_from_cache
+        if self.rows_produced_count is not None:
+            body["rows_produced_count"] = self.rows_produced_count
+        if self.rows_read_count is not None:
+            body["rows_read_count"] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None:
+            body["spill_to_disk_bytes"] = self.spill_to_disk_bytes
+        if self.task_total_time_ms is not None:
+            body["task_total_time_ms"] = self.task_total_time_ms
+        if self.total_time_ms is not None:
+            body["total_time_ms"] = self.total_time_ms
+        if self.write_remote_bytes is not None:
+            body["write_remote_bytes"] = self.write_remote_bytes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
-        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
-        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.compilation_time_ms is not None:
+            body["compilation_time_ms"] = self.compilation_time_ms
+        if self.execution_time_ms is not None:
+            body["execution_time_ms"] = self.execution_time_ms
+        if self.network_sent_bytes is not None:
+            body["network_sent_bytes"] = self.network_sent_bytes
         if self.overloading_queue_start_timestamp is not None:
-            body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
-        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+            body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None:
+            body["photon_total_time_ms"] = self.photon_total_time_ms
         if self.provisioning_queue_start_timestamp is not None:
-            body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
-        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
-        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+            body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None:
+            body["pruned_bytes"] = self.pruned_bytes
+        if self.pruned_files_count is not None:
+            body["pruned_files_count"] = self.pruned_files_count
         if self.query_compilation_start_timestamp is not None:
-            body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
-        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
-        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
-        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
-        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
-        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
-        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
-        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
-        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
-        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
-        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
-        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
-        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
-        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
+            body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None:
+            body["read_bytes"] = self.read_bytes
+        if self.read_cache_bytes is not None:
+            body["read_cache_bytes"] = self.read_cache_bytes
+        if self.read_files_count is not None:
+            body["read_files_count"] = self.read_files_count
+        if self.read_partitions_count is not None:
+            body["read_partitions_count"] = self.read_partitions_count
+        if self.read_remote_bytes is not None:
+            body["read_remote_bytes"] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None:
+            body["result_fetch_time_ms"] = self.result_fetch_time_ms
+        if self.result_from_cache is not None:
+            body["result_from_cache"] = self.result_from_cache
+        if self.rows_produced_count is not None:
+            body["rows_produced_count"] = self.rows_produced_count
+        if self.rows_read_count is not None:
+            body["rows_read_count"] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None:
+            body["spill_to_disk_bytes"] = self.spill_to_disk_bytes
+        if self.task_total_time_ms is not None:
+            body["task_total_time_ms"] = self.task_total_time_ms
+        if self.total_time_ms is not None:
+            body["total_time_ms"] = self.total_time_ms
+        if self.write_remote_bytes is not None:
+            body["write_remote_bytes"] = self.write_remote_bytes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryMetrics:
         """Deserializes the QueryMetrics from a dictionary."""
-        return cls(compilation_time_ms=d.get('compilation_time_ms', None),
-                   execution_time_ms=d.get('execution_time_ms', None),
-                   network_sent_bytes=d.get('network_sent_bytes', None),
-                   overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None),
-                   photon_total_time_ms=d.get('photon_total_time_ms', None),
-                   provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None),
-                   pruned_bytes=d.get('pruned_bytes', None),
-                   pruned_files_count=d.get('pruned_files_count', None),
-                   query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None),
-                   read_bytes=d.get('read_bytes', None),
-                   read_cache_bytes=d.get('read_cache_bytes', None),
-                   read_files_count=d.get('read_files_count', None),
-                   read_partitions_count=d.get('read_partitions_count', None),
-                   read_remote_bytes=d.get('read_remote_bytes', None),
-                   result_fetch_time_ms=d.get('result_fetch_time_ms', None),
-                   result_from_cache=d.get('result_from_cache', None),
-                   rows_produced_count=d.get('rows_produced_count', None),
-                   rows_read_count=d.get('rows_read_count', None),
-                   spill_to_disk_bytes=d.get('spill_to_disk_bytes', None),
-                   task_total_time_ms=d.get('task_total_time_ms', None),
-                   total_time_ms=d.get('total_time_ms', None),
-                   write_remote_bytes=d.get('write_remote_bytes', None))
+        return cls(
+            compilation_time_ms=d.get("compilation_time_ms", None),
+            execution_time_ms=d.get("execution_time_ms", None),
+            network_sent_bytes=d.get("network_sent_bytes", None),
+            overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None),
+            photon_total_time_ms=d.get("photon_total_time_ms", None),
+            provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None),
+            pruned_bytes=d.get("pruned_bytes", None),
+            pruned_files_count=d.get("pruned_files_count", None),
+            query_compilation_start_timestamp=d.get("query_compilation_start_timestamp", None),
+            read_bytes=d.get("read_bytes", None),
+            read_cache_bytes=d.get("read_cache_bytes", None),
+            read_files_count=d.get("read_files_count", None),
+            read_partitions_count=d.get("read_partitions_count", None),
+            read_remote_bytes=d.get("read_remote_bytes", None),
+            result_fetch_time_ms=d.get("result_fetch_time_ms", None),
+            result_from_cache=d.get("result_from_cache", None),
+            rows_produced_count=d.get("rows_produced_count", None),
+            rows_read_count=d.get("rows_read_count", None),
+            spill_to_disk_bytes=d.get("spill_to_disk_bytes", None),
+            task_total_time_ms=d.get("task_total_time_ms", None),
+            total_time_ms=d.get("total_time_ms", None),
+            write_remote_bytes=d.get("write_remote_bytes", None),
+        )
 
 
 @dataclass
@@ -4576,28 +5561,38 @@ class QueryOptions:
     def as_dict(self) -> dict:
         """Serializes the QueryOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.schema is not None: body['schema'] = self.schema
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.moved_to_trash_at is not None:
+            body["moved_to_trash_at"] = self.moved_to_trash_at
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.schema is not None:
+            body["schema"] = self.schema
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
-        if self.parameters: body['parameters'] = self.parameters
-        if self.schema is not None: body['schema'] = self.schema
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.moved_to_trash_at is not None:
+            body["moved_to_trash_at"] = self.moved_to_trash_at
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.schema is not None:
+            body["schema"] = self.schema
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryOptions:
         """Deserializes the QueryOptions from a dictionary."""
-        return cls(catalog=d.get('catalog', None),
-                   moved_to_trash_at=d.get('moved_to_trash_at', None),
-                   parameters=_repeated_dict(d, 'parameters', Parameter),
-                   schema=d.get('schema', None))
+        return cls(
+            catalog=d.get("catalog", None),
+            moved_to_trash_at=d.get("moved_to_trash_at", None),
+            parameters=_repeated_dict(d, "parameters", Parameter),
+            schema=d.get("schema", None),
+        )
 
 
 @dataclass
@@ -4630,40 +5625,58 @@ class QueryParameter:
     def as_dict(self) -> dict:
         """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict()
-        if self.date_value: body['date_value'] = self.date_value.as_dict()
-        if self.enum_value: body['enum_value'] = self.enum_value.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict()
-        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict()
-        if self.text_value: body['text_value'] = self.text_value.as_dict()
-        if self.title is not None: body['title'] = self.title
+        if self.date_range_value:
+            body["date_range_value"] = self.date_range_value.as_dict()
+        if self.date_value:
+            body["date_value"] = self.date_value.as_dict()
+        if self.enum_value:
+            body["enum_value"] = self.enum_value.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.numeric_value:
+            body["numeric_value"] = self.numeric_value.as_dict()
+        if self.query_backed_value:
+            body["query_backed_value"] = self.query_backed_value.as_dict()
+        if self.text_value:
+            body["text_value"] = self.text_value.as_dict()
+        if self.title is not None:
+            body["title"] = self.title
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryParameter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.date_range_value: body['date_range_value'] = self.date_range_value
-        if self.date_value: body['date_value'] = self.date_value
-        if self.enum_value: body['enum_value'] = self.enum_value
-        if self.name is not None: body['name'] = self.name
-        if self.numeric_value: body['numeric_value'] = self.numeric_value
-        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value
-        if self.text_value: body['text_value'] = self.text_value
-        if self.title is not None: body['title'] = self.title
+        if self.date_range_value:
+            body["date_range_value"] = self.date_range_value
+        if self.date_value:
+            body["date_value"] = self.date_value
+        if self.enum_value:
+            body["enum_value"] = self.enum_value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.numeric_value:
+            body["numeric_value"] = self.numeric_value
+        if self.query_backed_value:
+            body["query_backed_value"] = self.query_backed_value
+        if self.text_value:
+            body["text_value"] = self.text_value
+        if self.title is not None:
+            body["title"] = self.title
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
         """Deserializes the QueryParameter from a dictionary."""
-        return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue),
-                   date_value=_from_dict(d, 'date_value', DateValue),
-                   enum_value=_from_dict(d, 'enum_value', EnumValue),
-                   name=d.get('name', None),
-                   numeric_value=_from_dict(d, 'numeric_value', NumericValue),
-                   query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue),
-                   text_value=_from_dict(d, 'text_value', TextValue),
-                   title=d.get('title', None))
+        return cls(
+            date_range_value=_from_dict(d, "date_range_value", DateRangeValue),
+            date_value=_from_dict(d, "date_value", DateValue),
+            enum_value=_from_dict(d, "enum_value", EnumValue),
+            name=d.get("name", None),
+            numeric_value=_from_dict(d, "numeric_value", NumericValue),
+            query_backed_value=_from_dict(d, "query_backed_value", QueryBackedValue),
+            text_value=_from_dict(d, "text_value", TextValue),
+            title=d.get("title", None),
+        )
 
 
 @dataclass
@@ -4700,79 +5713,97 @@ class QueryPostContent:
     def as_dict(self) -> dict:
         """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query is not None: body['query'] = self.query
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query is not None:
+            body["query"] = self.query
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query is not None: body['query'] = self.query
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query is not None:
+            body["query"] = self.query
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
         """Deserializes the QueryPostContent from a dictionary."""
-        return cls(data_source_id=d.get('data_source_id', None),
-                   description=d.get('description', None),
-                   name=d.get('name', None),
-                   options=d.get('options', None),
-                   parent=d.get('parent', None),
-                   query=d.get('query', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
-                   tags=d.get('tags', None))
+        return cls(
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            parent=d.get("parent", None),
+            query=d.get("query", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+        )
 
 
 class QueryStatementType(Enum):
 
-    ALTER = 'ALTER'
-    ANALYZE = 'ANALYZE'
-    COPY = 'COPY'
-    CREATE = 'CREATE'
-    DELETE = 'DELETE'
-    DESCRIBE = 'DESCRIBE'
-    DROP = 'DROP'
-    EXPLAIN = 'EXPLAIN'
-    GRANT = 'GRANT'
-    INSERT = 'INSERT'
-    MERGE = 'MERGE'
-    OPTIMIZE = 'OPTIMIZE'
-    OTHER = 'OTHER'
-    REFRESH = 'REFRESH'
-    REPLACE = 'REPLACE'
-    REVOKE = 'REVOKE'
-    SELECT = 'SELECT'
-    SET = 'SET'
-    SHOW = 'SHOW'
-    TRUNCATE = 'TRUNCATE'
-    UPDATE = 'UPDATE'
-    USE = 'USE'
+    ALTER = "ALTER"
+    ANALYZE = "ANALYZE"
+    COPY = "COPY"
+    CREATE = "CREATE"
+    DELETE = "DELETE"
+    DESCRIBE = "DESCRIBE"
+    DROP = "DROP"
+    EXPLAIN = "EXPLAIN"
+    GRANT = "GRANT"
+    INSERT = "INSERT"
+    MERGE = "MERGE"
+    OPTIMIZE = "OPTIMIZE"
+    OTHER = "OTHER"
+    REFRESH = "REFRESH"
+    REPLACE = "REPLACE"
+    REVOKE = "REVOKE"
+    SELECT = "SELECT"
+    SET = "SET"
+    SHOW = "SHOW"
+    TRUNCATE = "TRUNCATE"
+    UPDATE = "UPDATE"
+    USE = "USE"
 
 
 class QueryStatus(Enum):
     """Statuses which are also used by OperationStatus in runtime"""
 
-    CANCELED = 'CANCELED'
-    COMPILED = 'COMPILED'
-    COMPILING = 'COMPILING'
-    FAILED = 'FAILED'
-    FINISHED = 'FINISHED'
-    QUEUED = 'QUEUED'
-    RUNNING = 'RUNNING'
-    STARTED = 'STARTED'
+    CANCELED = "CANCELED"
+    COMPILED = "COMPILED"
+    COMPILING = "COMPILING"
+    FAILED = "FAILED"
+    FINISHED = "FINISHED"
+    QUEUED = "QUEUED"
+    RUNNING = "RUNNING"
+    STARTED = "STARTED"
 
 
 @dataclass
@@ -4785,23 +5816,28 @@ class RepeatedEndpointConfPairs:
     def as_dict(self) -> dict:
         """Serializes the RepeatedEndpointConfPairs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.config_pair: body['config_pair'] = [v.as_dict() for v in self.config_pair]
+        if self.config_pair:
+            body["config_pair"] = [v.as_dict() for v in self.config_pair]
         if self.configuration_pairs:
-            body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs]
+            body["configuration_pairs"] = [v.as_dict() for v in self.configuration_pairs]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.config_pair: body['config_pair'] = self.config_pair
-        if self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs
+        if self.config_pair:
+            body["config_pair"] = self.config_pair
+        if self.configuration_pairs:
+            body["configuration_pairs"] = self.configuration_pairs
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepeatedEndpointConfPairs:
         """Deserializes the RepeatedEndpointConfPairs from a dictionary."""
-        return cls(config_pair=_repeated_dict(d, 'config_pair', EndpointConfPair),
-                   configuration_pairs=_repeated_dict(d, 'configuration_pairs', EndpointConfPair))
+        return cls(
+            config_pair=_repeated_dict(d, "config_pair", EndpointConfPair),
+            configuration_pairs=_repeated_dict(d, "configuration_pairs", EndpointConfPair),
+        )
 
 
 @dataclass
@@ -4857,42 +5893,58 @@ class ResultData:
     def as_dict(self) -> dict:
         """Serializes the ResultData into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.data_array: body['data_array'] = [v for v in self.data_array]
-        if self.external_links: body['external_links'] = [v.as_dict() for v in self.external_links]
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.data_array:
+            body["data_array"] = [v for v in self.data_array]
+        if self.external_links:
+            body["external_links"] = [v.as_dict() for v in self.external_links]
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
         if self.next_chunk_internal_link is not None:
-            body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.data_array: body['data_array'] = self.data_array
-        if self.external_links: body['external_links'] = self.external_links
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.data_array:
+            body["data_array"] = self.data_array
+        if self.external_links:
+            body["external_links"] = self.external_links
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
         if self.next_chunk_internal_link is not None:
-            body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
-        return cls(byte_count=d.get('byte_count', None),
-                   chunk_index=d.get('chunk_index', None),
-                   data_array=d.get('data_array', None),
-                   external_links=_repeated_dict(d, 'external_links', ExternalLink),
-                   next_chunk_index=d.get('next_chunk_index', None),
-                   next_chunk_internal_link=d.get('next_chunk_internal_link', None),
-                   row_count=d.get('row_count', None),
-                   row_offset=d.get('row_offset', None))
+        return cls(
+            byte_count=d.get("byte_count", None),
+            chunk_index=d.get("chunk_index", None),
+            data_array=d.get("data_array", None),
+            external_links=_repeated_dict(d, "external_links", ExternalLink),
+            next_chunk_index=d.get("next_chunk_index", None),
+            next_chunk_internal_link=d.get("next_chunk_internal_link", None),
+            row_count=d.get("row_count", None),
+            row_offset=d.get("row_offset", None),
+        )
 
 
 @dataclass
@@ -4923,37 +5975,53 @@ class ResultManifest:
     def as_dict(self) -> dict:
         """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.chunks: body['chunks'] = [v.as_dict() for v in self.chunks]
-        if self.format is not None: body['format'] = self.format.value
-        if self.schema: body['schema'] = self.schema.as_dict()
-        if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count
-        if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count
-        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.chunks:
+            body["chunks"] = [v.as_dict() for v in self.chunks]
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.schema:
+            body["schema"] = self.schema.as_dict()
+        if self.total_byte_count is not None:
+            body["total_byte_count"] = self.total_byte_count
+        if self.total_chunk_count is not None:
+            body["total_chunk_count"] = self.total_chunk_count
+        if self.total_row_count is not None:
+            body["total_row_count"] = self.total_row_count
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.chunks: body['chunks'] = self.chunks
-        if self.format is not None: body['format'] = self.format
-        if self.schema: body['schema'] = self.schema
-        if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count
-        if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count
-        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
-        if self.truncated is not None: body['truncated'] = self.truncated
+        if self.chunks:
+            body["chunks"] = self.chunks
+        if self.format is not None:
+            body["format"] = self.format
+        if self.schema:
+            body["schema"] = self.schema
+        if self.total_byte_count is not None:
+            body["total_byte_count"] = self.total_byte_count
+        if self.total_chunk_count is not None:
+            body["total_chunk_count"] = self.total_chunk_count
+        if self.total_row_count is not None:
+            body["total_row_count"] = self.total_row_count
+        if self.truncated is not None:
+            body["truncated"] = self.truncated
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
-        return cls(chunks=_repeated_dict(d, 'chunks', BaseChunkInfo),
-                   format=_enum(d, 'format', Format),
-                   schema=_from_dict(d, 'schema', ResultSchema),
-                   total_byte_count=d.get('total_byte_count', None),
-                   total_chunk_count=d.get('total_chunk_count', None),
-                   total_row_count=d.get('total_row_count', None),
-                   truncated=d.get('truncated', None))
+        return cls(
+            chunks=_repeated_dict(d, "chunks", BaseChunkInfo),
+            format=_enum(d, "format", Format),
+            schema=_from_dict(d, "schema", ResultSchema),
+            total_byte_count=d.get("total_byte_count", None),
+            total_chunk_count=d.get("total_chunk_count", None),
+            total_row_count=d.get("total_row_count", None),
+            truncated=d.get("truncated", None),
+        )
 
 
 @dataclass
@@ -4967,35 +6035,42 @@ class ResultSchema:
     def as_dict(self) -> dict:
         """Serializes the ResultSchema into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.column_count is not None: body['column_count'] = self.column_count
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.column_count is not None:
+            body["column_count"] = self.column_count
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResultSchema into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.column_count is not None: body['column_count'] = self.column_count
-        if self.columns: body['columns'] = self.columns
+        if self.column_count is not None:
+            body["column_count"] = self.column_count
+        if self.columns:
+            body["columns"] = self.columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultSchema:
         """Deserializes the ResultSchema from a dictionary."""
-        return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))
+        return cls(
+            column_count=d.get("column_count", None),
+            columns=_repeated_dict(d, "columns", ColumnInfo),
+        )
 
 
 class RunAsMode(Enum):
 
-    OWNER = 'OWNER'
-    VIEWER = 'VIEWER'
+    OWNER = "OWNER"
+    VIEWER = "VIEWER"
 
 
 class RunAsRole(Enum):
     """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
     viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
 
-    OWNER = 'owner'
-    VIEWER = 'viewer'
+    OWNER = "owner"
+    VIEWER = "viewer"
 
 
 @dataclass
@@ -5008,39 +6083,46 @@ class ServiceError:
     def as_dict(self) -> dict:
         """Serializes the ServiceError into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error_code is not None: body['error_code'] = self.error_code.value
-        if self.message is not None: body['message'] = self.message
+        if self.error_code is not None:
+            body["error_code"] = self.error_code.value
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ServiceError into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_code is not None: body['error_code'] = self.error_code
-        if self.message is not None: body['message'] = self.message
+        if self.error_code is not None:
+            body["error_code"] = self.error_code
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServiceError:
         """Deserializes the ServiceError from a dictionary."""
-        return cls(error_code=_enum(d, 'error_code', ServiceErrorCode), message=d.get('message', None))
+        return cls(
+            error_code=_enum(d, "error_code", ServiceErrorCode),
+            message=d.get("message", None),
+        )
 
 
 class ServiceErrorCode(Enum):
 
-    ABORTED = 'ABORTED'
-    ALREADY_EXISTS = 'ALREADY_EXISTS'
-    BAD_REQUEST = 'BAD_REQUEST'
-    CANCELLED = 'CANCELLED'
-    DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    IO_ERROR = 'IO_ERROR'
-    NOT_FOUND = 'NOT_FOUND'
-    RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED'
-    SERVICE_UNDER_MAINTENANCE = 'SERVICE_UNDER_MAINTENANCE'
-    TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE'
-    UNAUTHENTICATED = 'UNAUTHENTICATED'
-    UNKNOWN = 'UNKNOWN'
-    WORKSPACE_TEMPORARILY_UNAVAILABLE = 'WORKSPACE_TEMPORARILY_UNAVAILABLE'
+    ABORTED = "ABORTED"
+    ALREADY_EXISTS = "ALREADY_EXISTS"
+    BAD_REQUEST = "BAD_REQUEST"
+    CANCELLED = "CANCELLED"
+    DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    IO_ERROR = "IO_ERROR"
+    NOT_FOUND = "NOT_FOUND"
+    RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED"
+    SERVICE_UNDER_MAINTENANCE = "SERVICE_UNDER_MAINTENANCE"
+    TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE"
+    UNAUTHENTICATED = "UNAUTHENTICATED"
+    UNKNOWN = "UNKNOWN"
+    WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE"
 
 
 @dataclass
@@ -5057,25 +6139,32 @@ def as_dict(self) -> dict:
         """Serializes the SetResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type.value
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetResponse:
         """Deserializes the SetResponse from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl),
-                   object_id=d.get('object_id', None),
-                   object_type=_enum(d, 'object_type', ObjectType))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AccessControl),
+            object_id=d.get("object_id", None),
+            object_type=_enum(d, "object_type", ObjectType),
+        )
 
 
 @dataclass
@@ -5115,59 +6204,75 @@ class SetWorkspaceWarehouseConfigRequest:
     def as_dict(self) -> dict:
         """Serializes the SetWorkspaceWarehouseConfigRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.config_param: body['config_param'] = self.config_param.as_dict()
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.config_param:
+            body["config_param"] = self.config_param.as_dict()
         if self.data_access_config:
-            body['data_access_config'] = [v.as_dict() for v in self.data_access_config]
+            body["data_access_config"] = [v.as_dict() for v in self.data_access_config]
         if self.enabled_warehouse_types:
-            body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types]
-        if self.global_param: body['global_param'] = self.global_param.as_dict()
+            body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types]
+        if self.global_param:
+            body["global_param"] = self.global_param.as_dict()
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy.value
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy.value
         if self.sql_configuration_parameters:
-            body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.channel: body['channel'] = self.channel
-        if self.config_param: body['config_param'] = self.config_param
-        if self.data_access_config: body['data_access_config'] = self.data_access_config
-        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
-        if self.global_param: body['global_param'] = self.global_param
+        if self.channel:
+            body["channel"] = self.channel
+        if self.config_param:
+            body["config_param"] = self.config_param
+        if self.data_access_config:
+            body["data_access_config"] = self.data_access_config
+        if self.enabled_warehouse_types:
+            body["enabled_warehouse_types"] = self.enabled_warehouse_types
+        if self.global_param:
+            body["global_param"] = self.global_param
         if self.google_service_account is not None:
-            body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy
         if self.sql_configuration_parameters:
-            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigRequest:
         """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary."""
-        return cls(channel=_from_dict(d, 'channel', Channel),
-                   config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs),
-                   data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair),
-                   enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair),
-                   global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs),
-                   google_service_account=d.get('google_service_account', None),
-                   instance_profile_arn=d.get('instance_profile_arn', None),
-                   security_policy=_enum(d, 'security_policy',
-                                         SetWorkspaceWarehouseConfigRequestSecurityPolicy),
-                   sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters',
-                                                           RepeatedEndpointConfPairs))
+        return cls(
+            channel=_from_dict(d, "channel", Channel),
+            config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs),
+            data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair),
+            enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair),
+            global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs),
+            google_service_account=d.get("google_service_account", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            security_policy=_enum(
+                d,
+                "security_policy",
+                SetWorkspaceWarehouseConfigRequestSecurityPolicy,
+            ),
+            sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs),
+        )
 
 
 class SetWorkspaceWarehouseConfigRequestSecurityPolicy(Enum):
     """Security policy for warehouses"""
 
-    DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL'
-    NONE = 'NONE'
-    PASSTHROUGH = 'PASSTHROUGH'
+    DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL"
+    NONE = "NONE"
+    PASSTHROUGH = "PASSTHROUGH"
 
 
 @dataclass
@@ -5192,9 +6297,9 @@ def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigResponse:
 class SpotInstancePolicy(Enum):
     """Configurations whether the warehouse should use spot instances."""
 
-    COST_OPTIMIZED = 'COST_OPTIMIZED'
-    POLICY_UNSPECIFIED = 'POLICY_UNSPECIFIED'
-    RELIABILITY_OPTIMIZED = 'RELIABILITY_OPTIMIZED'
+    COST_OPTIMIZED = "COST_OPTIMIZED"
+    POLICY_UNSPECIFIED = "POLICY_UNSPECIFIED"
+    RELIABILITY_OPTIMIZED = "RELIABILITY_OPTIMIZED"
 
 
 @dataclass
@@ -5219,12 +6324,12 @@ def from_dict(cls, d: Dict[str, any]) -> StartWarehouseResponse:
 class State(Enum):
     """State of the warehouse"""
 
-    DELETED = 'DELETED'
-    DELETING = 'DELETING'
-    RUNNING = 'RUNNING'
-    STARTING = 'STARTING'
-    STOPPED = 'STOPPED'
-    STOPPING = 'STOPPING'
+    DELETED = "DELETED"
+    DELETING = "DELETING"
+    RUNNING = "RUNNING"
+    STARTING = "STARTING"
+    STOPPED = "STOPPED"
+    STOPPING = "STOPPING"
 
 
 @dataclass
@@ -5246,23 +6351,33 @@ class StatementParameterListItem:
     def as_dict(self) -> dict:
         """Serializes the StatementParameterListItem into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type
-        if self.value is not None: body['value'] = self.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type
-        if self.value is not None: body['value'] = self.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem:
         """Deserializes the StatementParameterListItem from a dictionary."""
-        return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None))
+        return cls(
+            name=d.get("name", None),
+            type=d.get("type", None),
+            value=d.get("value", None),
+        )
 
 
 @dataclass
@@ -5282,28 +6397,38 @@ class StatementResponse:
     def as_dict(self) -> dict:
         """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.manifest: body['manifest'] = self.manifest.as_dict()
-        if self.result: body['result'] = self.result.as_dict()
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.status: body['status'] = self.status.as_dict()
+        if self.manifest:
+            body["manifest"] = self.manifest.as_dict()
+        if self.result:
+            body["result"] = self.result.as_dict()
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.status:
+            body["status"] = self.status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StatementResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.manifest: body['manifest'] = self.manifest
-        if self.result: body['result'] = self.result
-        if self.statement_id is not None: body['statement_id'] = self.statement_id
-        if self.status: body['status'] = self.status
+        if self.manifest:
+            body["manifest"] = self.manifest
+        if self.result:
+            body["result"] = self.result
+        if self.statement_id is not None:
+            body["statement_id"] = self.statement_id
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
         """Deserializes the StatementResponse from a dictionary."""
-        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-                   result=_from_dict(d, 'result', ResultData),
-                   statement_id=d.get('statement_id', None),
-                   status=_from_dict(d, 'status', StatementStatus))
+        return cls(
+            manifest=_from_dict(d, "manifest", ResultManifest),
+            result=_from_dict(d, "result", ResultData),
+            statement_id=d.get("statement_id", None),
+            status=_from_dict(d, "status", StatementStatus),
+        )
 
 
 class StatementState(Enum):
@@ -5311,14 +6436,15 @@ class StatementState(Enum):
     `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution
     failed; reason for failure described in accompanying error message - `CANCELED`: user canceled;
     can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`:
-    execution successful, and statement closed; result no longer available for fetch"""
+    execution successful, and statement closed; result no longer available for fetch
+    """
 
-    CANCELED = 'CANCELED'
-    CLOSED = 'CLOSED'
-    FAILED = 'FAILED'
-    PENDING = 'PENDING'
-    RUNNING = 'RUNNING'
-    SUCCEEDED = 'SUCCEEDED'
+    CANCELED = "CANCELED"
+    CLOSED = "CLOSED"
+    FAILED = "FAILED"
+    PENDING = "PENDING"
+    RUNNING = "RUNNING"
+    SUCCEEDED = "SUCCEEDED"
 
 
 @dataclass
@@ -5337,30 +6463,37 @@ class StatementStatus:
     def as_dict(self) -> dict:
         """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error: body['error'] = self.error.as_dict()
-        if self.state is not None: body['state'] = self.state.value
+        if self.error:
+            body["error"] = self.error.as_dict()
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the StatementStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error: body['error'] = self.error
-        if self.state is not None: body['state'] = self.state
+        if self.error:
+            body["error"] = self.error
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementStatus:
         """Deserializes the StatementStatus from a dictionary."""
-        return cls(error=_from_dict(d, 'error', ServiceError), state=_enum(d, 'state', StatementState))
+        return cls(
+            error=_from_dict(d, "error", ServiceError),
+            state=_enum(d, "state", StatementState),
+        )
 
 
 class Status(Enum):
     """Health status of the warehouse."""
 
-    DEGRADED = 'DEGRADED'
-    FAILED = 'FAILED'
-    HEALTHY = 'HEALTHY'
-    STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED'
+    DEGRADED = "DEGRADED"
+    FAILED = "FAILED"
+    HEALTHY = "HEALTHY"
+    STATUS_UNSPECIFIED = "STATUS_UNSPECIFIED"
 
 
 @dataclass
@@ -5389,24 +6522,26 @@ class Success:
     def as_dict(self) -> dict:
         """Serializes the Success into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message.value
+        if self.message is not None:
+            body["message"] = self.message.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Success into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
+        if self.message is not None:
+            body["message"] = self.message
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Success:
         """Deserializes the Success from a dictionary."""
-        return cls(message=_enum(d, 'message', SuccessMessage))
+        return cls(message=_enum(d, "message", SuccessMessage))
 
 
 class SuccessMessage(Enum):
 
-    SUCCESS = 'Success'
+    SUCCESS = "Success"
 
 
 @dataclass
@@ -5423,118 +6558,126 @@ class TerminationReason:
     def as_dict(self) -> dict:
         """Serializes the TerminationReason into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.code is not None: body['code'] = self.code.value
-        if self.parameters: body['parameters'] = self.parameters
-        if self.type is not None: body['type'] = self.type.value
+        if self.code is not None:
+            body["code"] = self.code.value
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.type is not None:
+            body["type"] = self.type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.code is not None: body['code'] = self.code
-        if self.parameters: body['parameters'] = self.parameters
-        if self.type is not None: body['type'] = self.type
+        if self.code is not None:
+            body["code"] = self.code
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
-        return cls(code=_enum(d, 'code', TerminationReasonCode),
-                   parameters=d.get('parameters', None),
-                   type=_enum(d, 'type', TerminationReasonType))
+        return cls(
+            code=_enum(d, "code", TerminationReasonCode),
+            parameters=d.get("parameters", None),
+            type=_enum(d, "type", TerminationReasonType),
+        )
 
 
 class TerminationReasonCode(Enum):
     """status code indicating why the cluster was terminated"""
 
-    ABUSE_DETECTED = 'ABUSE_DETECTED'
-    ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE'
-    AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE'
-    AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE'
-    AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE'
-    AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE'
-    AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED'
-    AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE'
-    AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE'
-    AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE'
-    AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE'
-    AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION'
-    AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION'
-    AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING'
-    AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING'
-    AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE'
-    AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE'
-    AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE'
-    BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT'
-    BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION'
-    CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE'
-    CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE'
-    CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT'
-    CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN'
-    COMMUNICATION_LOST = 'COMMUNICATION_LOST'
-    CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE'
-    CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE'
-    DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE'
-    DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY'
-    DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE'
-    DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE'
-    DRIVER_UNRESPONSIVE = 'DRIVER_UNRESPONSIVE'
-    EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY'
-    GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED'
-    GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED'
-    GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE'
-    HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE'
-    IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED'
-    INACTIVITY = 'INACTIVITY'
-    INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE'
-    INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE'
-    INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    INVALID_ARGUMENT = 'INVALID_ARGUMENT'
-    INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE'
-    IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE'
-    JOB_FINISHED = 'JOB_FINISHED'
-    K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE'
-    K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT'
-    METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY'
-    NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT'
-    NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE'
-    NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE'
-    NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE'
-    NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE'
-    REQUEST_REJECTED = 'REQUEST_REJECTED'
-    REQUEST_THROTTLED = 'REQUEST_THROTTLED'
-    SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR'
-    SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION'
-    SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE'
-    SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES'
-    SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD'
-    SPARK_ERROR = 'SPARK_ERROR'
-    SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE'
-    SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE'
-    SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION'
-    STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE'
-    STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE'
-    SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE'
-    TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE'
-    TRIAL_EXPIRED = 'TRIAL_EXPIRED'
-    UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE'
-    UNKNOWN = 'UNKNOWN'
-    UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE'
-    UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE'
-    USER_REQUEST = 'USER_REQUEST'
-    WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE'
-    WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR'
-    WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR'
+    ABUSE_DETECTED = "ABUSE_DETECTED"
+    ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE"
+    AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE"
+    AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE"
+    AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE"
+    AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE"
+    AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED"
+    AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE"
+    AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE"
+    AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE"
+    AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE"
+    AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION"
+    AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION"
+    AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING"
+    AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING"
+    AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE"
+    AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE"
+    AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE"
+    BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT"
+    BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION"
+    CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE"
+    CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE"
+    CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT"
+    CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN"
+    COMMUNICATION_LOST = "COMMUNICATION_LOST"
+    CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE"
+    CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE"
+    DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE"
+    DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY"
+    DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE"
+    DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE"
+    DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE"
+    EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY"
+    GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED"
+    GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED"
+    GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE"
+    HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE"
+    IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED"
+    INACTIVITY = "INACTIVITY"
+    INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE"
+    INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE"
+    INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    INVALID_ARGUMENT = "INVALID_ARGUMENT"
+    INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE"
+    IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE"
+    JOB_FINISHED = "JOB_FINISHED"
+    K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE"
+    K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT"
+    METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY"
+    NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT"
+    NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE"
+    NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE"
+    NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE"
+    NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE"
+    REQUEST_REJECTED = "REQUEST_REJECTED"
+    REQUEST_THROTTLED = "REQUEST_THROTTLED"
+    SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR"
+    SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION"
+    SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE"
+    SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES"
+    SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD"
+    SPARK_ERROR = "SPARK_ERROR"
+    SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE"
+    SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE"
+    SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION"
+    STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE"
+    STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE"
+    SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE"
+    TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE"
+    TRIAL_EXPIRED = "TRIAL_EXPIRED"
+    UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE"
+    UNKNOWN = "UNKNOWN"
+    UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE"
+    UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE"
+    USER_REQUEST = "USER_REQUEST"
+    WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE"
+    WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR"
+    WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR"
 
 
 class TerminationReasonType(Enum):
     """type of the termination"""
 
-    CLIENT_ERROR = 'CLIENT_ERROR'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    SERVICE_FAULT = 'SERVICE_FAULT'
-    SUCCESS = 'SUCCESS'
+    CLIENT_ERROR = "CLIENT_ERROR"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    SERVICE_FAULT = "SERVICE_FAULT"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -5544,19 +6687,21 @@ class TextValue:
     def as_dict(self) -> dict:
         """Serializes the TextValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TextValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextValue:
         """Deserializes the TextValue from a dictionary."""
-        return cls(value=d.get('value', None))
+        return cls(value=d.get("value", None))
 
 
 @dataclass
@@ -5570,21 +6715,28 @@ class TimeRange:
     def as_dict(self) -> dict:
         """Serializes the TimeRange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
-        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.end_time_ms is not None:
+            body["end_time_ms"] = self.end_time_ms
+        if self.start_time_ms is not None:
+            body["start_time_ms"] = self.start_time_ms
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TimeRange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
-        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.end_time_ms is not None:
+            body["end_time_ms"] = self.end_time_ms
+        if self.start_time_ms is not None:
+            body["start_time_ms"] = self.start_time_ms
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TimeRange:
         """Deserializes the TimeRange from a dictionary."""
-        return cls(end_time_ms=d.get('end_time_ms', None), start_time_ms=d.get('start_time_ms', None))
+        return cls(
+            end_time_ms=d.get("end_time_ms", None),
+            start_time_ms=d.get("start_time_ms", None),
+        )
 
 
 @dataclass
@@ -5595,19 +6747,21 @@ class TransferOwnershipObjectId:
     def as_dict(self) -> dict:
         """Serializes the TransferOwnershipObjectId into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.new_owner is not None: body['new_owner'] = self.new_owner
+        if self.new_owner is not None:
+            body["new_owner"] = self.new_owner
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.new_owner is not None: body['new_owner'] = self.new_owner
+        if self.new_owner is not None:
+            body["new_owner"] = self.new_owner
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
         """Deserializes the TransferOwnershipObjectId from a dictionary."""
-        return cls(new_owner=d.get('new_owner', None))
+        return cls(new_owner=d.get("new_owner", None))
 
 
 @dataclass
@@ -5630,25 +6784,33 @@ class UpdateAlertRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert: body['alert'] = self.alert.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.alert:
+            body["alert"] = self.alert.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert: body['alert'] = self.alert
-        if self.id is not None: body['id'] = self.id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.alert:
+            body["alert"] = self.alert
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
         """Deserializes the UpdateAlertRequest from a dictionary."""
-        return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert),
-                   id=d.get('id', None),
-                   update_mask=d.get('update_mask', None))
+        return cls(
+            alert=_from_dict(d, "alert", UpdateAlertRequestAlert),
+            id=d.get("id", None),
+            update_mask=d.get("update_mask", None),
+        )
 
 
 @dataclass
@@ -5686,40 +6848,58 @@ class UpdateAlertRequestAlert:
     def as_dict(self) -> dict:
         """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition: body['condition'] = self.condition.as_dict()
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.condition:
+            body["condition"] = self.condition.as_dict()
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition: body['condition'] = self.condition
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.condition:
+            body["condition"] = self.condition
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
         """Deserializes the UpdateAlertRequestAlert from a dictionary."""
-        return cls(condition=_from_dict(d, 'condition', AlertCondition),
-                   custom_body=d.get('custom_body', None),
-                   custom_subject=d.get('custom_subject', None),
-                   display_name=d.get('display_name', None),
-                   notify_on_ok=d.get('notify_on_ok', None),
-                   owner_user_name=d.get('owner_user_name', None),
-                   query_id=d.get('query_id', None),
-                   seconds_to_retrigger=d.get('seconds_to_retrigger', None))
+        return cls(
+            condition=_from_dict(d, "condition", AlertCondition),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            display_name=d.get("display_name", None),
+            notify_on_ok=d.get("notify_on_ok", None),
+            owner_user_name=d.get("owner_user_name", None),
+            query_id=d.get("query_id", None),
+            seconds_to_retrigger=d.get("seconds_to_retrigger", None),
+        )
 
 
 @dataclass
@@ -5742,25 +6922,33 @@ class UpdateQueryRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.query: body['query'] = self.query.as_dict()
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.id is not None:
+            body["id"] = self.id
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.query: body['query'] = self.query
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.id is not None:
+            body["id"] = self.id
+        if self.query:
+            body["query"] = self.query
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
         """Deserializes the UpdateQueryRequest from a dictionary."""
-        return cls(id=d.get('id', None),
-                   query=_from_dict(d, 'query', UpdateQueryRequestQuery),
-                   update_mask=d.get('update_mask', None))
+        return cls(
+            id=d.get("id", None),
+            query=_from_dict(d, "query", UpdateQueryRequestQuery),
+            update_mask=d.get("update_mask", None),
+        )
 
 
 @dataclass
@@ -5800,49 +6988,73 @@ class UpdateQueryRequestQuery:
     def as_dict(self) -> dict:
         """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
         """Deserializes the UpdateQueryRequestQuery from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None),
-                   catalog=d.get('catalog', None),
-                   description=d.get('description', None),
-                   display_name=d.get('display_name', None),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
-                   tags=d.get('tags', None),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            owner_user_name=d.get("owner_user_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -5884,25 +7096,33 @@ class UpdateVisualizationRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
-        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
+        if self.visualization:
+            body["visualization"] = self.visualization.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.update_mask is not None: body['update_mask'] = self.update_mask
-        if self.visualization: body['visualization'] = self.visualization
+        if self.id is not None:
+            body["id"] = self.id
+        if self.update_mask is not None:
+            body["update_mask"] = self.update_mask
+        if self.visualization:
+            body["visualization"] = self.visualization
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
         """Deserializes the UpdateVisualizationRequest from a dictionary."""
-        return cls(id=d.get('id', None),
-                   update_mask=d.get('update_mask', None),
-                   visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization))
+        return cls(
+            id=d.get("id", None),
+            update_mask=d.get("update_mask", None),
+            visualization=_from_dict(d, "visualization", UpdateVisualizationRequestVisualization),
+        )
 
 
 @dataclass
@@ -5924,28 +7144,38 @@ class UpdateVisualizationRequestVisualization:
     def as_dict(self) -> dict:
         """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
         """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   serialized_options=d.get('serialized_options', None),
-                   serialized_query_plan=d.get('serialized_query_plan', None),
-                   type=d.get('type', None))
+        return cls(
+            display_name=d.get("display_name", None),
+            serialized_options=d.get("serialized_options", None),
+            serialized_query_plan=d.get("serialized_query_plan", None),
+            type=d.get("type", None),
+        )
 
 
 @dataclass
@@ -5959,23 +7189,33 @@ class User:
     def as_dict(self) -> dict:
         """Serializes the User into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.email is not None: body['email'] = self.email
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
+        if self.email is not None:
+            body["email"] = self.email
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the User into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.email is not None: body['email'] = self.email
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
+        if self.email is not None:
+            body["email"] = self.email
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
-        return cls(email=d.get('email', None), id=d.get('id', None), name=d.get('name', None))
+        return cls(
+            email=d.get("email", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -6009,40 +7249,58 @@ class Visualization:
     def as_dict(self) -> dict:
         """Serializes the Visualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Visualization into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
-        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
-        if self.type is not None: body['type'] = self.type
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.serialized_options is not None:
+            body["serialized_options"] = self.serialized_options
+        if self.serialized_query_plan is not None:
+            body["serialized_query_plan"] = self.serialized_query_plan
+        if self.type is not None:
+            body["type"] = self.type
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Visualization:
         """Deserializes the Visualization from a dictionary."""
-        return cls(create_time=d.get('create_time', None),
-                   display_name=d.get('display_name', None),
-                   id=d.get('id', None),
-                   query_id=d.get('query_id', None),
-                   serialized_options=d.get('serialized_options', None),
-                   serialized_query_plan=d.get('serialized_query_plan', None),
-                   type=d.get('type', None),
-                   update_time=d.get('update_time', None))
+        return cls(
+            create_time=d.get("create_time", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            query_id=d.get("query_id", None),
+            serialized_options=d.get("serialized_options", None),
+            serialized_query_plan=d.get("serialized_query_plan", None),
+            type=d.get("type", None),
+            update_time=d.get("update_time", None),
+        )
 
 
 @dataclass
@@ -6062,30 +7320,38 @@ class WarehouseAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the WarehouseAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlRequest:
         """Deserializes the WarehouseAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', WarehousePermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", WarehousePermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -6108,33 +7374,43 @@ class WarehouseAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the WarehouseAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlResponse:
         """Deserializes the WarehouseAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', WarehousePermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", WarehousePermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -6149,34 +7425,42 @@ class WarehousePermission:
     def as_dict(self) -> dict:
         """Serializes the WarehousePermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermission:
         """Deserializes the WarehousePermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', WarehousePermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", WarehousePermissionLevel),
+        )
 
 
 class WarehousePermissionLevel(Enum):
     """Permission level"""
 
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_MONITOR = 'CAN_MONITOR'
-    CAN_USE = 'CAN_USE'
-    IS_OWNER = 'IS_OWNER'
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_MONITOR = "CAN_MONITOR"
+    CAN_USE = "CAN_USE"
+    IS_OWNER = "IS_OWNER"
 
 
 @dataclass
@@ -6191,26 +7475,32 @@ def as_dict(self) -> dict:
         """Serializes the WarehousePermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehousePermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissions:
         """Deserializes the WarehousePermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      WarehouseAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -6223,22 +7513,28 @@ class WarehousePermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the WarehousePermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsDescription:
         """Deserializes the WarehousePermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', WarehousePermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", WarehousePermissionLevel),
+        )
 
 
 @dataclass
@@ -6252,23 +7548,27 @@ def as_dict(self) -> dict:
         """Serializes the WarehousePermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsRequest:
         """Deserializes the WarehousePermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      WarehouseAccessControlRequest),
-                   warehouse_id=d.get('warehouse_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlRequest),
+            warehouse_id=d.get("warehouse_id", None),
+        )
 
 
 @dataclass
@@ -6283,30 +7583,36 @@ class WarehouseTypePair:
     def as_dict(self) -> dict:
         """Serializes the WarehouseTypePair into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WarehouseTypePair into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseTypePair:
         """Deserializes the WarehouseTypePair from a dictionary."""
-        return cls(enabled=d.get('enabled', None),
-                   warehouse_type=_enum(d, 'warehouse_type', WarehouseTypePairWarehouseType))
+        return cls(
+            enabled=d.get("enabled", None),
+            warehouse_type=_enum(d, "warehouse_type", WarehouseTypePairWarehouseType),
+        )
 
 
 class WarehouseTypePairWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`."""
 
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
 
 
 @dataclass
@@ -6328,28 +7634,38 @@ class Widget:
     def as_dict(self) -> dict:
         """Serializes the Widget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.options: body['options'] = self.options.as_dict()
-        if self.visualization: body['visualization'] = self.visualization.as_dict()
-        if self.width is not None: body['width'] = self.width
+        if self.id is not None:
+            body["id"] = self.id
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.visualization:
+            body["visualization"] = self.visualization.as_dict()
+        if self.width is not None:
+            body["width"] = self.width
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Widget into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None: body['id'] = self.id
-        if self.options: body['options'] = self.options
-        if self.visualization: body['visualization'] = self.visualization
-        if self.width is not None: body['width'] = self.width
+        if self.id is not None:
+            body["id"] = self.id
+        if self.options:
+            body["options"] = self.options
+        if self.visualization:
+            body["visualization"] = self.visualization
+        if self.width is not None:
+            body["width"] = self.width
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Widget:
         """Deserializes the Widget from a dictionary."""
-        return cls(id=d.get('id', None),
-                   options=_from_dict(d, 'options', WidgetOptions),
-                   visualization=_from_dict(d, 'visualization', LegacyVisualization),
-                   width=d.get('width', None))
+        return cls(
+            id=d.get("id", None),
+            options=_from_dict(d, "options", WidgetOptions),
+            visualization=_from_dict(d, "visualization", LegacyVisualization),
+            width=d.get("width", None),
+        )
 
 
 @dataclass
@@ -6380,37 +7696,53 @@ class WidgetOptions:
     def as_dict(self) -> dict:
         """Serializes the WidgetOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
-        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
-        if self.position: body['position'] = self.position.as_dict()
-        if self.title is not None: body['title'] = self.title
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.is_hidden is not None:
+            body["isHidden"] = self.is_hidden
+        if self.parameter_mappings:
+            body["parameterMappings"] = self.parameter_mappings
+        if self.position:
+            body["position"] = self.position.as_dict()
+        if self.title is not None:
+            body["title"] = self.title
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
-        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
-        if self.position: body['position'] = self.position
-        if self.title is not None: body['title'] = self.title
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.is_hidden is not None:
+            body["isHidden"] = self.is_hidden
+        if self.parameter_mappings:
+            body["parameterMappings"] = self.parameter_mappings
+        if self.position:
+            body["position"] = self.position
+        if self.title is not None:
+            body["title"] = self.title
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetOptions:
         """Deserializes the WidgetOptions from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   description=d.get('description', None),
-                   is_hidden=d.get('isHidden', None),
-                   parameter_mappings=d.get('parameterMappings', None),
-                   position=_from_dict(d, 'position', WidgetPosition),
-                   title=d.get('title', None),
-                   updated_at=d.get('updated_at', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            description=d.get("description", None),
+            is_hidden=d.get("isHidden", None),
+            parameter_mappings=d.get("parameterMappings", None),
+            position=_from_dict(d, "position", WidgetPosition),
+            title=d.get("title", None),
+            updated_at=d.get("updated_at", None),
+        )
 
 
 @dataclass
@@ -6436,31 +7768,43 @@ class WidgetPosition:
     def as_dict(self) -> dict:
         """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_height is not None: body['autoHeight'] = self.auto_height
-        if self.col is not None: body['col'] = self.col
-        if self.row is not None: body['row'] = self.row
-        if self.size_x is not None: body['sizeX'] = self.size_x
-        if self.size_y is not None: body['sizeY'] = self.size_y
+        if self.auto_height is not None:
+            body["autoHeight"] = self.auto_height
+        if self.col is not None:
+            body["col"] = self.col
+        if self.row is not None:
+            body["row"] = self.row
+        if self.size_x is not None:
+            body["sizeX"] = self.size_x
+        if self.size_y is not None:
+            body["sizeY"] = self.size_y
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WidgetPosition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_height is not None: body['autoHeight'] = self.auto_height
-        if self.col is not None: body['col'] = self.col
-        if self.row is not None: body['row'] = self.row
-        if self.size_x is not None: body['sizeX'] = self.size_x
-        if self.size_y is not None: body['sizeY'] = self.size_y
+        if self.auto_height is not None:
+            body["autoHeight"] = self.auto_height
+        if self.col is not None:
+            body["col"] = self.col
+        if self.row is not None:
+            body["row"] = self.row
+        if self.size_x is not None:
+            body["sizeX"] = self.size_x
+        if self.size_y is not None:
+            body["sizeY"] = self.size_y
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetPosition:
         """Deserializes the WidgetPosition from a dictionary."""
-        return cls(auto_height=d.get('autoHeight', None),
-                   col=d.get('col', None),
-                   row=d.get('row', None),
-                   size_x=d.get('sizeX', None),
-                   size_y=d.get('sizeY', None))
+        return cls(
+            auto_height=d.get("autoHeight", None),
+            col=d.get("col", None),
+            row=d.get("row", None),
+            size_x=d.get("sizeX", None),
+            size_y=d.get("sizeY", None),
+        )
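
The as_dict/from_dict pair above round-trips between the snake_case dataclass fields and the camelCase keys the REST API expects. A minimal sketch of that round trip (field values are illustrative):

from databricks.sdk.service.sql import WidgetPosition

pos = WidgetPosition(auto_height=False, col=0, row=0, size_x=3, size_y=3)
body = pos.as_dict()
# wire-format keys: {'autoHeight': False, 'col': 0, 'row': 0, 'sizeX': 3, 'sizeY': 3}
assert WidgetPosition.from_dict(body) == pos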
 
 
 class AlertsAPI:
@@ -6474,85 +7818,105 @@ def __init__(self, api_client):
 
     def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert:
         """Create an alert.
-        
+
         Creates an alert.
-        
+
         :param alert: :class:`CreateAlertRequestAlert` (optional)
-        
+
         :returns: :class:`Alert`
         """
         body = {}
-        if alert is not None: body['alert'] = alert.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if alert is not None:
+            body["alert"] = alert.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/sql/alerts', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/sql/alerts", body=body, headers=headers)
         return Alert.from_dict(res)
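
A usage sketch for the generated client above; the field names on `CreateAlertRequestAlert` are assumptions for illustration:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()
created = w.alerts.create(
    alert=sql.CreateAlertRequestAlert(
        display_name="Revenue below threshold",  # assumed field name
        query_id="<query-id>",                   # assumed field name
    )
)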
 
     def delete(self, id: str):
         """Delete an alert.
-        
+
         Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
         can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
         deleted after 30 days.
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/sql/alerts/{id}", headers=headers)
 
     def get(self, id: str) -> Alert:
         """Get an alert.
-        
+
         Gets an alert.
-        
+
         :param id: str
-        
+
         :returns: :class:`Alert`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/sql/alerts/{id}", headers=headers)
         return Alert.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ListAlertsResponseAlert]:
         """List alerts.
-        
+
         Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
         concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ListAlertsResponseAlert`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers)
-            if 'results' in json:
-                for v in json['results']:
+            json = self._api.do("GET", "/api/2.0/sql/alerts", query=query, headers=headers)
+            if "results" in json:
+                for v in json["results"]:
                     yield ListAlertsResponseAlert.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
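
The generated `list` above follows `next_page_token` internally, so callers simply iterate; for example, assuming a `WorkspaceClient` named `w` as in the earlier sketch:

for a in w.alerts.list(page_size=100):
    print(a.id)  # `id` on ListAlertsResponseAlert is assumed here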
 
-    def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert:
+    def update(
+        self,
+        id: str,
+        update_mask: str,
+        *,
+        alert: Optional[UpdateAlertRequestAlert] = None,
+    ) -> Alert:
         """Update an alert.
-        
+
         Updates an alert.
-        
+
         :param id: str
         :param update_mask: str
           The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -6560,20 +7924,25 @@ def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertReques
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
         :param alert: :class:`UpdateAlertRequestAlert` (optional)
-        
+
         :returns: :class:`Alert`
         """
         body = {}
-        if alert is not None: body['alert'] = alert.as_dict()
-        if update_mask is not None: body['update_mask'] = update_mask
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if alert is not None:
+            body["alert"] = alert.as_dict()
+        if update_mask is not None:
+            body["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=body, headers=headers)
+        res = self._api.do("PATCH", f"/api/2.0/sql/alerts/{id}", body=body, headers=headers)
         return Alert.from_dict(res)
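
An update sketch that follows the field-mask guidance above (explicit field list rather than `*`); the `display_name` field on `UpdateAlertRequestAlert` is an assumption for illustration:

updated = w.alerts.update(
    id="<alert-id>",
    update_mask="display_name",  # explicit mask, avoiding "*"
    alert=sql.UpdateAlertRequestAlert(display_name="Renamed alert"),  # assumed field
)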
 
 
@@ -6582,32 +7951,34 @@ class AlertsLegacyAPI:
     periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
     notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
     the Jobs API, e.g. :method:jobs/create.
-    
+
     **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
     more]
-    
+
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               options: AlertOptions,
-               query_id: str,
-               *,
-               parent: Optional[str] = None,
-               rearm: Optional[int] = None) -> LegacyAlert:
+    def create(
+        self,
+        name: str,
+        options: AlertOptions,
+        query_id: str,
+        *,
+        parent: Optional[str] = None,
+        rearm: Optional[int] = None,
+    ) -> LegacyAlert:
         """Create an alert.
-        
+
         Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
         condition of its result, and notifies users or notification destinations if the condition was met.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param name: str
           Name of the alert.
         :param options: :class:`AlertOptions`
@@ -6619,94 +7990,114 @@ def create(self,
         :param rearm: int (optional)
           Number of seconds after being triggered before the alert rearms itself and can be triggered again.
           If `null`, the alert will never be triggered again.
-        
+
         :returns: :class:`LegacyAlert`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options.as_dict()
-        if parent is not None: body['parent'] = parent
-        if query_id is not None: body['query_id'] = query_id
-        if rearm is not None: body['rearm'] = rearm
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers)
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options.as_dict()
+        if parent is not None:
+            body["parent"] = parent
+        if query_id is not None:
+            body["query_id"] = query_id
+        if rearm is not None:
+            body["rearm"] = rearm
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/preview/sql/alerts", body=body, headers=headers)
         return LegacyAlert.from_dict(res)
 
     def delete(self, alert_id: str):
         """Delete an alert.
-        
+
         Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
         queries and dashboards, alerts cannot be moved to the trash.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param alert_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/preview/sql/alerts/{alert_id}",
+            headers=headers,
+        )
 
     def get(self, alert_id: str) -> LegacyAlert:
         """Get an alert.
-        
+
         Gets an alert.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param alert_id: str
-        
+
         :returns: :class:`LegacyAlert`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/preview/sql/alerts/{alert_id}", headers=headers)
         return LegacyAlert.from_dict(res)
 
     def list(self) -> Iterator[LegacyAlert]:
         """Get alerts.
-        
+
         Gets a list of alerts.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :returns: Iterator over :class:`LegacyAlert`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers)
+        res = self._api.do("GET", "/api/2.0/preview/sql/alerts", headers=headers)
         return [LegacyAlert.from_dict(v) for v in res]
 
-    def update(self,
-               alert_id: str,
-               name: str,
-               options: AlertOptions,
-               query_id: str,
-               *,
-               rearm: Optional[int] = None):
+    def update(
+        self,
+        alert_id: str,
+        name: str,
+        options: AlertOptions,
+        query_id: str,
+        *,
+        rearm: Optional[int] = None,
+    ):
         """Update an alert.
-        
+
         Updates an alert.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param alert_id: str
         :param name: str
           Name of the alert.
@@ -6717,17 +8108,29 @@ def update(self,
         :param rearm: int (optional)
           Number of seconds after being triggered before the alert rearms itself and can be triggered again.
           If `null`, the alert will never be triggered again.
-        
-        
+
+
         """
         body = {}
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options.as_dict()
-        if query_id is not None: body['query_id'] = query_id
-        if rearm is not None: body['rearm'] = rearm
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PUT', f'/api/2.0/preview/sql/alerts/{alert_id}', body=body, headers=headers)
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options.as_dict()
+        if query_id is not None:
+            body["query_id"] = query_id
+        if rearm is not None:
+            body["rearm"] = rearm
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/preview/sql/alerts/{alert_id}",
+            body=body,
+            headers=headers,
+        )
 
 
 class DashboardWidgetsAPI:
@@ -6737,15 +8140,17 @@ class DashboardWidgetsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               dashboard_id: str,
-               options: WidgetOptions,
-               width: int,
-               *,
-               text: Optional[str] = None,
-               visualization_id: Optional[str] = None) -> Widget:
+    def create(
+        self,
+        dashboard_id: str,
+        options: WidgetOptions,
+        width: int,
+        *,
+        text: Optional[str] = None,
+        visualization_id: Optional[str] = None,
+    ) -> Widget:
         """Add widget to a dashboard.
-        
+
         :param dashboard_id: str
           Dashboard ID returned by :method:dashboards/create.
         :param options: :class:`WidgetOptions`
@@ -6756,43 +8161,55 @@ def create(self,
           contains a visualization in the `visualization` field.
         :param visualization_id: str (optional)
           Query Visualization ID returned by :method:queryvisualizations/create.
-        
+
         :returns: :class:`Widget`
         """
         body = {}
-        if dashboard_id is not None: body['dashboard_id'] = dashboard_id
-        if options is not None: body['options'] = options.as_dict()
-        if text is not None: body['text'] = text
-        if visualization_id is not None: body['visualization_id'] = visualization_id
-        if width is not None: body['width'] = width
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/sql/widgets', body=body, headers=headers)
+        if dashboard_id is not None:
+            body["dashboard_id"] = dashboard_id
+        if options is not None:
+            body["options"] = options.as_dict()
+        if text is not None:
+            body["text"] = text
+        if visualization_id is not None:
+            body["visualization_id"] = visualization_id
+        if width is not None:
+            body["width"] = width
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/preview/sql/widgets", body=body, headers=headers)
         return Widget.from_dict(res)
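
A sketch of adding a visualization widget using the `WidgetOptions` and `WidgetPosition` types serialized earlier; the IDs are placeholders and the `w.dashboard_widgets` attribute name is assumed:

widget = w.dashboard_widgets.create(
    dashboard_id="<dashboard-id>",
    visualization_id="<visualization-id>",
    width=1,
    options=sql.WidgetOptions(
        title="Revenue by day",
        position=sql.WidgetPosition(col=0, row=0, size_x=3, size_y=3),
    ),
)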
 
     def delete(self, id: str):
         """Remove widget.
-        
+
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/preview/sql/widgets/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/preview/sql/widgets/{id}", headers=headers)
 
-    def update(self,
-               id: str,
-               dashboard_id: str,
-               options: WidgetOptions,
-               width: int,
-               *,
-               text: Optional[str] = None,
-               visualization_id: Optional[str] = None) -> Widget:
+    def update(
+        self,
+        id: str,
+        dashboard_id: str,
+        options: WidgetOptions,
+        width: int,
+        *,
+        text: Optional[str] = None,
+        visualization_id: Optional[str] = None,
+    ) -> Widget:
         """Update existing widget.
-        
+
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
         :param dashboard_id: str
@@ -6805,18 +8222,31 @@ def update(self,
           contains a visualization in the `visualization` field.
         :param visualization_id: str (optional)
           Query Visualization ID returned by :method:queryvisualizations/create.
-        
+
         :returns: :class:`Widget`
         """
         body = {}
-        if dashboard_id is not None: body['dashboard_id'] = dashboard_id
-        if options is not None: body['options'] = options.as_dict()
-        if text is not None: body['text'] = text
-        if visualization_id is not None: body['visualization_id'] = visualization_id
-        if width is not None: body['width'] = width
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', f'/api/2.0/preview/sql/widgets/{id}', body=body, headers=headers)
+        if dashboard_id is not None:
+            body["dashboard_id"] = dashboard_id
+        if options is not None:
+            body["options"] = options.as_dict()
+        if text is not None:
+            body["text"] = text
+        if visualization_id is not None:
+            body["visualization_id"] = visualization_id
+        if width is not None:
+            body["width"] = width
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/widgets/{id}",
+            body=body,
+            headers=headers,
+        )
         return Widget.from_dict(res)
 
 
@@ -6830,16 +8260,18 @@ class DashboardsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               name: str,
-               *,
-               dashboard_filters_enabled: Optional[bool] = None,
-               is_favorite: Optional[bool] = None,
-               parent: Optional[str] = None,
-               run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> Dashboard:
+    def create(
+        self,
+        name: str,
+        *,
+        dashboard_filters_enabled: Optional[bool] = None,
+        is_favorite: Optional[bool] = None,
+        parent: Optional[str] = None,
+        run_as_role: Optional[RunAsRole] = None,
+        tags: Optional[List[str]] = None,
+    ) -> Dashboard:
         """Create a dashboard object.
-        
+
         :param name: str
           The title of this dashboard that appears in list views and at the top of the dashboard page.
         :param dashboard_filters_enabled: bool (optional)
@@ -6852,65 +8284,92 @@ def create(self,
           Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
-        
+
         :returns: :class:`Dashboard`
         """
         body = {}
         if dashboard_filters_enabled is not None:
-            body['dashboard_filters_enabled'] = dashboard_filters_enabled
-        if is_favorite is not None: body['is_favorite'] = is_favorite
-        if name is not None: body['name'] = name
-        if parent is not None: body['parent'] = parent
-        if run_as_role is not None: body['run_as_role'] = run_as_role.value
-        if tags is not None: body['tags'] = [v for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/sql/dashboards', body=body, headers=headers)
+            body["dashboard_filters_enabled"] = dashboard_filters_enabled
+        if is_favorite is not None:
+            body["is_favorite"] = is_favorite
+        if name is not None:
+            body["name"] = name
+        if parent is not None:
+            body["parent"] = parent
+        if run_as_role is not None:
+            body["run_as_role"] = run_as_role.value
+        if tags is not None:
+            body["tags"] = [v for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/preview/sql/dashboards",
+            body=body,
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
 
     def delete(self, dashboard_id: str):
         """Remove a dashboard.
-        
+
         Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot
         be shared.
-        
+
         :param dashboard_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/preview/sql/dashboards/{dashboard_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/preview/sql/dashboards/{dashboard_id}",
+            headers=headers,
+        )
 
     def get(self, dashboard_id: str) -> Dashboard:
         """Retrieve a definition.
-        
+
         Returns a JSON representation of a dashboard object, including its visualization and query objects.
-        
+
         :param dashboard_id: str
-        
+
         :returns: :class:`Dashboard`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/preview/sql/dashboards/{dashboard_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/preview/sql/dashboards/{dashboard_id}",
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
 
-    def list(self,
-             *,
-             order: Optional[ListOrder] = None,
-             page: Optional[int] = None,
-             page_size: Optional[int] = None,
-             q: Optional[str] = None) -> Iterator[Dashboard]:
+    def list(
+        self,
+        *,
+        order: Optional[ListOrder] = None,
+        page: Optional[int] = None,
+        page_size: Optional[int] = None,
+        q: Optional[str] = None,
+    ) -> Iterator[Dashboard]:
         """Get dashboard objects.
-        
+
         Fetch a paginated list of dashboard objects.
-        
+
         **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
         degradation, or a temporary ban.
-        
+
         :param order: :class:`ListOrder` (optional)
           Name of dashboard attribute to order by.
         :param page: int (optional)
@@ -6919,60 +8378,79 @@ def list(self,
           Number of dashboards to return per page.
         :param q: str (optional)
           Full text search term.
-        
+
         :returns: Iterator over :class:`Dashboard`
         """
 
         query = {}
-        if order is not None: query['order'] = order.value
-        if page is not None: query['page'] = page
-        if page_size is not None: query['page_size'] = page_size
-        if q is not None: query['q'] = q
-        headers = {'Accept': 'application/json', }
+        if order is not None:
+            query["order"] = order.value
+        if page is not None:
+            query["page"] = page
+        if page_size is not None:
+            query["page_size"] = page_size
+        if q is not None:
+            query["q"] = q
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['page'] = 1
+        query["page"] = 1
         while True:
-            json = self._api.do('GET', '/api/2.0/preview/sql/dashboards', query=query, headers=headers)
-            if 'results' in json:
-                for v in json['results']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                "/api/2.0/preview/sql/dashboards",
+                query=query,
+                headers=headers,
+            )
+            if "results" in json:
+                for v in json["results"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield Dashboard.from_dict(v)
-            if 'results' not in json or not json['results']:
+            if "results" not in json or not json["results"]:
                 return
-            query['page'] += 1
+            query["page"] += 1
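
Because this endpoint pages by `page` number rather than by token, the loop above keeps a `seen` set so that dashboards created while iterating do not surface twice. Callers just consume the iterator, for example:

for d in w.dashboards.list(q="revenue", page_size=50):
    print(d.id)  # `id` mirrors the dedup key used in the loop above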
 
     def restore(self, dashboard_id: str):
         """Restore a dashboard.
-        
+
         A restored dashboard appears in list views and searches and can be shared.
-        
+
         :param dashboard_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('POST', f'/api/2.0/preview/sql/dashboards/trash/{dashboard_id}', headers=headers)
+        self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/dashboards/trash/{dashboard_id}",
+            headers=headers,
+        )
 
-    def update(self,
-               dashboard_id: str,
-               *,
-               name: Optional[str] = None,
-               run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> Dashboard:
+    def update(
+        self,
+        dashboard_id: str,
+        *,
+        name: Optional[str] = None,
+        run_as_role: Optional[RunAsRole] = None,
+        tags: Optional[List[str]] = None,
+    ) -> Dashboard:
         """Change a dashboard definition.
-        
+
         Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
         does not add, modify, or remove widgets.
-        
+
         **Note**: You cannot undo this operation.
-        
+
         :param dashboard_id: str
         :param name: str (optional)
           The title of this dashboard that appears in list views and at the top of the dashboard page.
@@ -6980,19 +8458,27 @@ def update(self,
           Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
-        
+
         :returns: :class:`Dashboard`
         """
         body = {}
-        if name is not None: body['name'] = name
-        if run_as_role is not None: body['run_as_role'] = run_as_role.value
-        if tags is not None: body['tags'] = [v for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/preview/sql/dashboards/{dashboard_id}',
-                           body=body,
-                           headers=headers)
+        if name is not None:
+            body["name"] = name
+        if run_as_role is not None:
+            body["run_as_role"] = run_as_role.value
+        if tags is not None:
+            body["tags"] = [v for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/dashboards/{dashboard_id}",
+            body=body,
+            headers=headers,
+        )
         return Dashboard.from_dict(res)
 
 
@@ -7000,13 +8486,13 @@ class DataSourcesAPI:
     """This API is provided to assist you in making new query objects. When creating a query object, you may
     optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't
     already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
-    
+
     This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
     advise you to use any text editor, REST client, or `grep` to search the response from this API for the
     name of your SQL warehouse as it appears in Databricks SQL.
-    
+
     **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-    
+
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
@@ -7014,22 +8500,24 @@ def __init__(self, api_client):
 
     def list(self) -> Iterator[DataSource]:
         """Get a list of SQL warehouses.
-        
+
         Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
         API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
         queries against it.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :returns: Iterator over :class:`DataSource`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/preview/sql/data_sources', headers=headers)
+        res = self._api.do("GET", "/api/2.0/preview/sql/data_sources", headers=headers)
         return [DataSource.from_dict(v) for v in res]
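
Instead of grepping the raw response as suggested above, the same lookup can be done in Python; the `name` and `id` attribute names on `DataSource` are assumptions for illustration:

warehouse_name = "Shared SQL Warehouse"  # as it appears in Databricks SQL
data_source_id = next(
    ds.id for ds in w.data_sources.list() if ds.name == warehouse_name  # assumed fields
)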
 
 
@@ -7037,17 +8525,17 @@ class DbsqlPermissionsAPI:
     """The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes
     only one endpoint, which gets the Access Control List for a given object. You cannot modify any
     permissions using this API.
-    
+
     There are three levels of permission:
-    
+
     - `CAN_VIEW`: Allows read-only access
-    
+
     - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
-    
+
     - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
-    
+
     **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-    
+
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
@@ -7055,219 +8543,267 @@ def __init__(self, api_client):
 
     def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse:
         """Get object ACL.
-        
+
         Gets a JSON representation of the access control list (ACL) for a specified object.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:workspace/getpermissions instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param object_type: :class:`ObjectTypePlural`
           The type of object permissions to check.
         :param object_id: str
           Object ID. An ACL is returned for the object with this UUID.
-        
+
         :returns: :class:`GetResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}",
+            headers=headers,
+        )
         return GetResponse.from_dict(res)
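
A read-only ACL check sketch; the `ObjectTypePlural.QUERIES` enum member and the `access_control_list` and `permission_level` attributes are assumptions for illustration:

acl = w.dbsql_permissions.get(
    object_type=sql.ObjectTypePlural.QUERIES,  # assumed enum member
    object_id="<object-uuid>",
)
for entry in acl.access_control_list:  # assumed attribute
    print(entry.permission_level)      # CAN_VIEW, CAN_RUN, or CAN_MANAGE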
 
-    def set(self,
-            object_type: ObjectTypePlural,
-            object_id: str,
-            *,
-            access_control_list: Optional[List[AccessControl]] = None) -> SetResponse:
+    def set(
+        self,
+        object_type: ObjectTypePlural,
+        object_id: str,
+        *,
+        access_control_list: Optional[List[AccessControl]] = None,
+    ) -> SetResponse:
         """Set object ACL.
-        
+
         Sets the access control list (ACL) for a specified object. This operation completely rewrites the
         ACL.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:workspace/setpermissions instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param object_type: :class:`ObjectTypePlural`
           The type of object permission to set.
         :param object_id: str
           Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
         :param access_control_list: List[:class:`AccessControl`] (optional)
-        
+
         :returns: :class:`SetResponse`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}",
+            body=body,
+            headers=headers,
+        )
         return SetResponse.from_dict(res)
 
-    def transfer_ownership(self,
-                           object_type: OwnableObjectType,
-                           object_id: TransferOwnershipObjectId,
-                           *,
-                           new_owner: Optional[str] = None) -> Success:
+    def transfer_ownership(
+        self,
+        object_type: OwnableObjectType,
+        object_id: TransferOwnershipObjectId,
+        *,
+        new_owner: Optional[str] = None,
+    ) -> Success:
         """Transfer object ownership.
-        
+
         Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
         :method:queries/update and :method:alerts/update respectively instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param object_type: :class:`OwnableObjectType`
           The type of object on which to change ownership.
         :param object_id: :class:`TransferOwnershipObjectId`
           The ID of the object on which to change ownership.
         :param new_owner: str (optional)
           Email address for the new owner, who must exist in the workspace.
-        
+
         :returns: :class:`Success`
         """
         body = {}
-        if new_owner is not None: body['new_owner'] = new_owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer',
-                           body=body,
-                           headers=headers)
+        if new_owner is not None:
+            body["new_owner"] = new_owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer",
+            body=body,
+            headers=headers,
+        )
         return Success.from_dict(res)
 
 
 class QueriesAPI:
     """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
     includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
-    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query:
         """Create a query.
-        
+
         Creates a query.
-        
+
         :param query: :class:`CreateQueryRequestQuery` (optional)
-        
+
         :returns: :class:`Query`
         """
         body = {}
-        if query is not None: body['query'] = query.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if query is not None:
+            body["query"] = query.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/sql/queries", body=body, headers=headers)
         return Query.from_dict(res)
 
     def delete(self, id: str):
         """Delete a query.
-        
+
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
         permanently deleted after 30 days.
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/sql/queries/{id}", headers=headers)
 
     def get(self, id: str) -> Query:
         """Get a query.
-        
+
         Gets a query.
-        
+
         :param id: str
-        
+
         :returns: :class:`Query`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/sql/queries/{id}", headers=headers)
         return Query.from_dict(res)
 
-    def list(self,
-             *,
-             page_size: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[ListQueryObjectsResponseQuery]:
         """List queries.
-        
+
         Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
         concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-        
+
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers)
-            if 'results' in json:
-                for v in json['results']:
+            json = self._api.do("GET", "/api/2.0/sql/queries", query=query, headers=headers)
+            if "results" in json:
+                for v in json["results"]:
                     yield ListQueryObjectsResponseQuery.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
-    def list_visualizations(self,
-                            id: str,
-                            *,
-                            page_size: Optional[int] = None,
-                            page_token: Optional[str] = None) -> Iterator[Visualization]:
+    def list_visualizations(
+        self,
+        id: str,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> Iterator[Visualization]:
         """List visualizations on a query.
-        
+
         Gets a list of visualizations on a query.
-        
+
         :param id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-        
+
         :returns: Iterator over :class:`Visualization`
         """
 
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/sql/queries/{id}/visualizations',
-                                query=query,
-                                headers=headers)
-            if 'results' in json:
-                for v in json['results']:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/sql/queries/{id}/visualizations",
+                query=query,
+                headers=headers,
+            )
+            if "results" in json:
+                for v in json["results"]:
                     yield Visualization.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
 
-    def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query:
+    def update(
+        self,
+        id: str,
+        update_mask: str,
+        *,
+        query: Optional[UpdateQueryRequestQuery] = None,
+    ) -> Query:
         """Update a query.
-        
+
         Updates a query.
-        
+
         :param id: str
         :param update_mask: str
           The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -7275,20 +8811,25 @@ def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryReques
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
-        
+
         :returns: :class:`Query`
         """
         body = {}
-        if query is not None: body['query'] = query.as_dict()
-        if update_mask is not None: body['update_mask'] = update_mask
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if query is not None:
+            body["query"] = query.as_dict()
+        if update_mask is not None:
+            body["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers)
+        res = self._api.do("PATCH", f"/api/2.0/sql/queries/{id}", body=body, headers=headers)
         return Query.from_dict(res)
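
As with alerts, updates use an explicit field mask; the field names on `UpdateQueryRequestQuery` below are assumptions for illustration:

q = w.queries.update(
    id="<query-id>",
    update_mask="display_name,description",  # comma-separated, no spaces
    query=sql.UpdateQueryRequestQuery(
        display_name="Nightly revenue rollup",     # assumed field
        description="Aggregates revenue per day",  # assumed field
    ),
)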
 
 
@@ -7296,45 +8837,47 @@ class QueriesLegacyAPI:
     """These endpoints are used for CRUD operations on query definitions. Query definitions include the target
     SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
     scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
-    
+
     **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
     more]
-    
+
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               data_source_id: Optional[str] = None,
-               description: Optional[str] = None,
-               name: Optional[str] = None,
-               options: Optional[Any] = None,
-               parent: Optional[str] = None,
-               query: Optional[str] = None,
-               run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> LegacyQuery:
+    def create(
+        self,
+        *,
+        data_source_id: Optional[str] = None,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+        options: Optional[Any] = None,
+        parent: Optional[str] = None,
+        query: Optional[str] = None,
+        run_as_role: Optional[RunAsRole] = None,
+        tags: Optional[List[str]] = None,
+    ) -> LegacyQuery:
         """Create a new query definition.
-        
+
         Creates a new query definition. Queries created with this endpoint belong to the authenticated user
         making the request.
-        
+
         The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
         use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
         `data_source_id` from an existing query.
-        
+
         **Note**: You cannot add a visualization until you create the query.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
           warehouse ID. [Learn more]
-          
+
           [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
         :param description: str (optional)
           General description that conveys additional information about this query such as usage notes.
@@ -7352,95 +8895,116 @@ def create(self,
           Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
-        
+
         :returns: :class:`LegacyQuery`
         """
         body = {}
-        if data_source_id is not None: body['data_source_id'] = data_source_id
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if parent is not None: body['parent'] = parent
-        if query is not None: body['query'] = query
-        if run_as_role is not None: body['run_as_role'] = run_as_role.value
-        if tags is not None: body['tags'] = [v for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers)
+        if data_source_id is not None:
+            body["data_source_id"] = data_source_id
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if parent is not None:
+            body["parent"] = parent
+        if query is not None:
+            body["query"] = query
+        if run_as_role is not None:
+            body["run_as_role"] = run_as_role.value
+        if tags is not None:
+            body["tags"] = [v for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/preview/sql/queries", body=body, headers=headers)
         return LegacyQuery.from_dict(res)
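
For orientation, a minimal usage sketch of this legacy create endpoint, assuming the workspace client exposes the service as `w.queries_legacy` and that a valid data source ID has already been looked up; the placeholder values are illustrative only:

```
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # authenticates from the environment / .databrickscfg

# "<data-source-id>" is a placeholder; copy it from an existing query or the
# Data Sources API, as the docstring above suggests.
q = w.queries_legacy.create(
    name="Example query",
    description="Created through the legacy Queries API",
    data_source_id="<data-source-id>",
    query="SELECT 1",
)
print(q.id)
```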
 
     def delete(self, query_id: str):
         """Delete a query.
-        
+
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         they cannot be used for alerts. The trash is deleted after 30 days.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param query_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/preview/sql/queries/{query_id}",
+            headers=headers,
+        )
 
     def get(self, query_id: str) -> LegacyQuery:
         """Get a query definition.
-        
+
         Retrieve a query object definition along with contextual permissions information about the currently
         authenticated user.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param query_id: str
-        
+
         :returns: :class:`LegacyQuery`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers)
         return LegacyQuery.from_dict(res)
 
-    def list(self,
-             *,
-             order: Optional[str] = None,
-             page: Optional[int] = None,
-             page_size: Optional[int] = None,
-             q: Optional[str] = None) -> Iterator[LegacyQuery]:
+    def list(
+        self,
+        *,
+        order: Optional[str] = None,
+        page: Optional[int] = None,
+        page_size: Optional[int] = None,
+        q: Optional[str] = None,
+    ) -> Iterator[LegacyQuery]:
         """Get a list of queries.
-        
+
         Gets a list of queries. Optionally, this list can be filtered by a search term.
-        
+
         **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
         degradation, or a temporary ban.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param order: str (optional)
           Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
           descending instead.
-          
+
           - `name`: The name of the query.
-          
+
           - `created_at`: The timestamp the query was created.
-          
+
           - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
           value is treated as the highest value for sorting.
-          
+
           - `executed_at`: The timestamp when the query was last run.
-          
+
           - `created_by`: The user name of the user that created the query.
         :param page: int (optional)
           Page number to retrieve.
@@ -7448,79 +9012,98 @@ def list(self,
           Number of queries to return per page.
         :param q: str (optional)
           Full text search term
-        
+
         :returns: Iterator over :class:`LegacyQuery`
         """
 
         query = {}
-        if order is not None: query['order'] = order
-        if page is not None: query['page'] = page
-        if page_size is not None: query['page_size'] = page_size
-        if q is not None: query['q'] = q
-        headers = {'Accept': 'application/json', }
+        if order is not None:
+            query["order"] = order
+        if page is not None:
+            query["page"] = page
+        if page_size is not None:
+            query["page_size"] = page_size
+        if q is not None:
+            query["q"] = q
+        headers = {
+            "Accept": "application/json",
+        }
 
         # deduplicate items that may have been added during iteration
         seen = set()
-        query['page'] = 1
+        query["page"] = 1
         while True:
-            json = self._api.do('GET', '/api/2.0/preview/sql/queries', query=query, headers=headers)
-            if 'results' in json:
-                for v in json['results']:
-                    i = v['id']
+            json = self._api.do(
+                "GET",
+                "/api/2.0/preview/sql/queries",
+                query=query,
+                headers=headers,
+            )
+            if "results" in json:
+                for v in json["results"]:
+                    i = v["id"]
                     if i in seen:
                         continue
                     seen.add(i)
                     yield LegacyQuery.from_dict(v)
-            if 'results' not in json or not json['results']:
+            if "results" not in json or not json["results"]:
                 return
-            query['page'] += 1
+            query["page"] += 1
 
     def restore(self, query_id: str):
         """Restore a query.
-        
+
         Restore a query that has been moved to the trash. A restored query appears in list views and searches.
         You can use restored queries for alerts.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
         [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param query_id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('POST', f'/api/2.0/preview/sql/queries/trash/{query_id}', headers=headers)
+        self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/queries/trash/{query_id}",
+            headers=headers,
+        )
 
-    def update(self,
-               query_id: str,
-               *,
-               data_source_id: Optional[str] = None,
-               description: Optional[str] = None,
-               name: Optional[str] = None,
-               options: Optional[Any] = None,
-               query: Optional[str] = None,
-               run_as_role: Optional[RunAsRole] = None,
-               tags: Optional[List[str]] = None) -> LegacyQuery:
+    def update(
+        self,
+        query_id: str,
+        *,
+        data_source_id: Optional[str] = None,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+        options: Optional[Any] = None,
+        query: Optional[str] = None,
+        run_as_role: Optional[RunAsRole] = None,
+        tags: Optional[List[str]] = None,
+    ) -> LegacyQuery:
         """Change a query definition.
-        
+
         Modify this query definition.
-        
+
         **Note**: You cannot undo this operation.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
         instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param query_id: str
         :param data_source_id: str (optional)
           Data source ID maps to the ID of the data source used by the resource and is distinct from the
           warehouse ID. [Learn more]
-          
+
           [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
         :param description: str (optional)
           General description that conveys additional information about this query such as usage notes.
@@ -7536,20 +9119,35 @@ def update(self,
           Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
-        
+
         :returns: :class:`LegacyQuery`
         """
         body = {}
-        if data_source_id is not None: body['data_source_id'] = data_source_id
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if query is not None: body['query'] = query
-        if run_as_role is not None: body['run_as_role'] = run_as_role.value
-        if tags is not None: body['tags'] = [v for v in tags]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers)
+        if data_source_id is not None:
+            body["data_source_id"] = data_source_id
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if query is not None:
+            body["query"] = query
+        if run_as_role is not None:
+            body["run_as_role"] = run_as_role.value
+        if tags is not None:
+            body["tags"] = [v for v in tags]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/queries/{query_id}",
+            body=body,
+            headers=headers,
+        )
         return LegacyQuery.from_dict(res)
 
 
@@ -7560,20 +9158,22 @@ class QueryHistoryAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def list(self,
-             *,
-             filter_by: Optional[QueryFilter] = None,
-             include_metrics: Optional[bool] = None,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> ListQueriesResponse:
+    def list(
+        self,
+        *,
+        filter_by: Optional[QueryFilter] = None,
+        include_metrics: Optional[bool] = None,
+        max_results: Optional[int] = None,
+        page_token: Optional[str] = None,
+    ) -> ListQueriesResponse:
         """List Queries.
-        
+
         List the history of queries through SQL warehouses and serverless compute.
-        
+
         You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
         returned first (up to max_results in request). The pagination token returned in response can be used
         to list subsequent query statuses.
-        
+
         :param filter_by: :class:`QueryFilter` (optional)
           A filter to limit query history results. This field is optional.
         :param include_metrics: bool (optional)
@@ -7585,18 +9185,24 @@ def list(self,
           A token that can be used to get the next page of results. The token can contain characters that
           need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
           %2B. This field is optional.
-        
+
         :returns: :class:`ListQueriesResponse`
         """
 
         query = {}
-        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
-        if include_metrics is not None: query['include_metrics'] = include_metrics
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers)
+        if filter_by is not None:
+            query["filter_by"] = filter_by.as_dict()
+        if include_metrics is not None:
+            query["include_metrics"] = include_metrics
+        if max_results is not None:
+            query["max_results"] = max_results
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/sql/history/queries", query=query, headers=headers)
         return ListQueriesResponse.from_dict(res)
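
A sketch of fetching a page of query history and following the pagination token, assuming the client exposes the service as `w.query_history` (`w` is a `WorkspaceClient` as in the earlier sketch); the response-side attribute name `next_page_token` is an assumption mirroring the request parameter documented above and should be checked against `ListQueriesResponse`:

```
page = w.query_history.list(include_metrics=True, max_results=100)

# next_page_token is assumed to be the response counterpart of the
# page_token request parameter documented above.
token = getattr(page, "next_page_token", None)
if token:
    next_page = w.query_history.list(include_metrics=True, max_results=100, page_token=token)
```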
 
 
@@ -7607,47 +9213,57 @@ class QueryVisualizationsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               *,
-               visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization:
+    def create(
+        self,
+        *,
+        visualization: Optional[CreateVisualizationRequestVisualization] = None,
+    ) -> Visualization:
         """Add a visualization to a query.
-        
+
         Adds a visualization to a query.
-        
+
         :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
-        
+
         :returns: :class:`Visualization`
         """
         body = {}
-        if visualization is not None: body['visualization'] = visualization.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if visualization is not None:
+            body["visualization"] = visualization.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers)
+        res = self._api.do("POST", "/api/2.0/sql/visualizations", body=body, headers=headers)
         return Visualization.from_dict(res)
 
     def delete(self, id: str):
         """Remove a visualization.
-        
+
         Removes a visualization.
-        
+
         :param id: str
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/sql/visualizations/{id}", headers=headers)
 
-    def update(self,
-               id: str,
-               update_mask: str,
-               *,
-               visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization:
+    def update(
+        self,
+        id: str,
+        update_mask: str,
+        *,
+        visualization: Optional[UpdateVisualizationRequestVisualization] = None,
+    ) -> Visualization:
         """Update a visualization.
-        
+
         Updates a visualization.
-        
+
         :param id: str
         :param update_mask: str
           The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -7655,51 +9271,63 @@ def update(self,
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-          
+
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
         :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
-        
+
         :returns: :class:`Visualization`
         """
         body = {}
-        if update_mask is not None: body['update_mask'] = update_mask
-        if visualization is not None: body['visualization'] = visualization.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers)
+        if update_mask is not None:
+            body["update_mask"] = update_mask
+        if visualization is not None:
+            body["visualization"] = visualization.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/sql/visualizations/{id}",
+            body=body,
+            headers=headers,
+        )
         return Visualization.from_dict(res)
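
A sketch of a field-mask update as described above; the `w.query_visualizations` accessor and the `display_name` field on `UpdateVisualizationRequestVisualization` are assumptions, used here to illustrate naming the changed fields explicitly instead of passing `*`:

```
from databricks.sdk.service.sql import UpdateVisualizationRequestVisualization

viz = w.query_visualizations.update(
    id="<visualization-id>",  # placeholder UUID
    update_mask="display_name",  # list the changed field rather than "*"
    visualization=UpdateVisualizationRequestVisualization(display_name="Renamed chart"),
)
```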
 
 
 class QueryVisualizationsLegacyAPI:
     """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
     within the Databricks Workspace. Data structures may change over time.
-    
+
     **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
     more]
-    
+
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               query_id: str,
-               type: str,
-               options: Any,
-               *,
-               description: Optional[str] = None,
-               name: Optional[str] = None) -> LegacyVisualization:
+    def create(
+        self,
+        query_id: str,
+        type: str,
+        options: Any,
+        *,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+    ) -> LegacyVisualization:
         """Add visualization to a query.
-        
+
         Creates visualization in the query.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:queryvisualizations/create instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param query_id: str
           The identifier returned by :method:queries/create
         :param type: str
@@ -7711,59 +9339,80 @@ def create(self,
           A short description of this visualization. This is not displayed in the UI.
         :param name: str (optional)
           The name of the visualization that appears on dashboards and the query screen.
-        
+
         :returns: :class:`LegacyVisualization`
         """
         body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if query_id is not None: body['query_id'] = query_id
-        if type is not None: body['type'] = type
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers)
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if query_id is not None:
+            body["query_id"] = query_id
+        if type is not None:
+            body["type"] = type
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/preview/sql/visualizations",
+            body=body,
+            headers=headers,
+        )
         return LegacyVisualization.from_dict(res)
 
     def delete(self, id: str):
         """Remove visualization.
-        
+
         Removes a visualization from the query.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:queryvisualizations/delete instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param id: str
           Widget ID returned by :method:queryvizualisations/create
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/preview/sql/visualizations/{id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/preview/sql/visualizations/{id}",
+            headers=headers,
+        )
 
-    def update(self,
-               id: str,
-               *,
-               created_at: Optional[str] = None,
-               description: Optional[str] = None,
-               name: Optional[str] = None,
-               options: Optional[Any] = None,
-               query: Optional[LegacyQuery] = None,
-               type: Optional[str] = None,
-               updated_at: Optional[str] = None) -> LegacyVisualization:
+    def update(
+        self,
+        id: str,
+        *,
+        created_at: Optional[str] = None,
+        description: Optional[str] = None,
+        name: Optional[str] = None,
+        options: Optional[Any] = None,
+        query: Optional[LegacyQuery] = None,
+        type: Optional[str] = None,
+        updated_at: Optional[str] = None,
+    ) -> LegacyVisualization:
         """Edit existing visualization.
-        
+
         Updates visualization in the query.
-        
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:queryvisualizations/update instead. [Learn more]
-        
+
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
+
         :param id: str
           The UUID for this visualization.
         :param created_at: str (optional)
@@ -7778,20 +9427,35 @@ def update(self,
         :param type: str (optional)
           The type of visualization: chart, table, pivot table, and so on.
         :param updated_at: str (optional)
-        
+
         :returns: :class:`LegacyVisualization`
         """
         body = {}
-        if created_at is not None: body['created_at'] = created_at
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if options is not None: body['options'] = options
-        if query is not None: body['query'] = query.as_dict()
-        if type is not None: body['type'] = type
-        if updated_at is not None: body['updated_at'] = updated_at
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers)
+        if created_at is not None:
+            body["created_at"] = created_at
+        if description is not None:
+            body["description"] = description
+        if name is not None:
+            body["name"] = name
+        if options is not None:
+            body["options"] = options
+        if query is not None:
+            body["query"] = query.as_dict()
+        if type is not None:
+            body["type"] = type
+        if updated_at is not None:
+            body["updated_at"] = updated_at
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/preview/sql/visualizations/{id}",
+            body=body,
+            headers=headers,
+        )
         return LegacyVisualization.from_dict(res)
 
 
@@ -7803,26 +9467,28 @@ def __init__(self, api_client):
 
     def get_config(self) -> ClientConfig:
         """Read workspace configuration for Redash-v2.
-        
+
         :returns: :class:`ClientConfig`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/redash-v2/config', headers=headers)
+        res = self._api.do("GET", "/api/2.0/redash-v2/config", headers=headers)
         return ClientConfig.from_dict(res)
 
 
 class StatementExecutionAPI:
     """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
     fetch the result.
-    
+
     **Getting started**
-    
+
     We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
-    
+
     **Overview of statement execution and result fetching**
-    
+
     Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
     SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format.
     If no other parameters are specified, the server will wait for up to 10s before returning a response. If
@@ -7830,7 +9496,7 @@ class StatementExecutionAPI:
     array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will
     provide the statement ID that can be used to poll for results by using a
     :method:statementexecution/getStatement request.
-    
+
     You can specify whether the call should behave synchronously, asynchronously or start synchronously with a
     fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
     settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to
@@ -7838,7 +9504,7 @@ class StatementExecutionAPI:
     statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while
     the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to
     asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
-    
+
     In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30
     seconds; if the statement execution finishes within this time, the result data is returned directly in the
     response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns
@@ -7850,38 +9516,38 @@ class StatementExecutionAPI:
     seconds; if the statement execution finishes within this time, the result data is returned directly in the
     response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can
     be used to fetch status and results in the same way as in the asynchronous mode.
-    
+
     Depending on the size, the result can be split into multiple chunks. If the statement execution is
     successful, the statement response contains a manifest and the first chunk of the result. The manifest
     contains schema information and provides metadata for each chunk in the result. Result chunks can be
     retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any
     order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a
     `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk.
-    
+
     A statement can be canceled with :method:statementexecution/cancelExecution.
-    
+
     **Fetching result data: format and disposition**
-    
+
     To specify the format of the result data, use the `format` field, which can be set to one of the following
     options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`.
-    
+
     There are two ways to receive statement results, controlled by the `disposition` setting, which can be
     either `INLINE` or `EXTERNAL_LINKS`:
-    
+
     - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for
     smaller results. This mode can only be used with the `JSON_ARRAY` format.
-    
+
     - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data
     in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode
     can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`.
-    
+
     By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`.
-    
+
     **Limits and limitations**
-    
+
     Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly
     match the byte count of the actual payload.
-    
+
     - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. -
     Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit
     will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum
@@ -7894,50 +9560,57 @@ class StatementExecutionAPI:
     once every 15 minutes. - The results are only available for one hour after success; polling does not
     extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
     you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
-    
+
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
-    [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
+    [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def cancel_execution(self, statement_id: str):
         """Cancel statement execution.
-        
+
         Requests that an executing statement be canceled. Callers must poll for status to see the terminal
         state.
-        
+
         :param statement_id: str
           The statement ID is returned upon successfully submitting a SQL statement, and is a required
           reference for all subsequent calls.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('POST', f'/api/2.0/sql/statements/{statement_id}/cancel', headers=headers)
-
-    def execute_statement(self,
-                          statement: str,
-                          warehouse_id: str,
-                          *,
-                          byte_limit: Optional[int] = None,
-                          catalog: Optional[str] = None,
-                          disposition: Optional[Disposition] = None,
-                          format: Optional[Format] = None,
-                          on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None,
-                          parameters: Optional[List[StatementParameterListItem]] = None,
-                          row_limit: Optional[int] = None,
-                          schema: Optional[str] = None,
-                          wait_timeout: Optional[str] = None) -> StatementResponse:
+        self._api.do(
+            "POST",
+            f"/api/2.0/sql/statements/{statement_id}/cancel",
+            headers=headers,
+        )
+
+    def execute_statement(
+        self,
+        statement: str,
+        warehouse_id: str,
+        *,
+        byte_limit: Optional[int] = None,
+        catalog: Optional[str] = None,
+        disposition: Optional[Disposition] = None,
+        format: Optional[Format] = None,
+        on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None,
+        parameters: Optional[List[StatementParameterListItem]] = None,
+        row_limit: Optional[int] = None,
+        schema: Optional[str] = None,
+        wait_timeout: Optional[str] = None,
+    ) -> StatementResponse:
         """Execute a SQL statement.
-        
+
         :param statement: str
           The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
         :param warehouse_id: str
           Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
-          
+
           [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
         :param byte_limit: int (optional)
           Applies the given byte limit to the statement's result size. Byte counts are based on internal data
@@ -7947,37 +9620,37 @@ def execute_statement(self,
           explicitly set.
         :param catalog: str (optional)
           Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
-          
+
           [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
         :param disposition: :class:`Disposition` (optional)
         :param format: :class:`Format` (optional)
           Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
           `CSV`.
-          
+
           Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
           disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
-          
+
           When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
           where each value is either the *string representation* of a value, or `null`. For example, the
           output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would
           look like this:
-          
+
           ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ```
-          
+
           When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result
           contains compact JSON with no indentation or extra whitespace.
-          
+
           When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result
           will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format].
-          
+
           When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a
           CSV according to the [RFC 4180] standard. All the column values will have *string representation*
           similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first
           chunk in the result would contain a header row with column names. For example, the output of `SELECT
           concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this:
-          
+
           ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ```
-          
+
           [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format
           [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180
         :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional)
@@ -7992,27 +9665,27 @@ def execute_statement(self,
           of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be
           omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a
           string.
-          
+
           If the type is given, parameters will be checked for type correctness according to the given type. A
           value is correct if the provided string can be converted to the requested type using the `cast`
           function. The exact semantics are described in the section [`cast` function] of the SQL language
           reference.
-          
+
           For example, the following statement contains two parameters, `my_name` and `my_date`:
-          
+
           SELECT * FROM my_table WHERE name = :my_name AND date = :my_date
-          
+
           The parameters can be passed in the request body as follows:
-          
+
           { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
           "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value":
           "2020-01-01", "type": "DATE" } ] }
-          
+
           Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL
           Statement Execution API.
-          
+
           Also see the section [Parameter markers] of the SQL language reference.
-          
+
           [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html
           [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html
         :param row_limit: int (optional)
@@ -8021,111 +9694,136 @@ def execute_statement(self,
           the limit or not.
         :param schema: str (optional)
           Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL.
-          
+
           [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html
         :param wait_timeout: str (optional)
           The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set
           to 0 or to a value between 5 and 50.
-          
+
           When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the
           execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID
           which can be used for polling with :method:statementexecution/getStatement.
-          
+
           When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait
           for the statement execution to finish. If the execution finishes within this time, the call returns
           immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If
           the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
           timeout is reached.
-        
+
         :returns: :class:`StatementResponse`
         """
         body = {}
-        if byte_limit is not None: body['byte_limit'] = byte_limit
-        if catalog is not None: body['catalog'] = catalog
-        if disposition is not None: body['disposition'] = disposition.value
-        if format is not None: body['format'] = format.value
-        if on_wait_timeout is not None: body['on_wait_timeout'] = on_wait_timeout.value
-        if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters]
-        if row_limit is not None: body['row_limit'] = row_limit
-        if schema is not None: body['schema'] = schema
-        if statement is not None: body['statement'] = statement
-        if wait_timeout is not None: body['wait_timeout'] = wait_timeout
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers)
+        if byte_limit is not None:
+            body["byte_limit"] = byte_limit
+        if catalog is not None:
+            body["catalog"] = catalog
+        if disposition is not None:
+            body["disposition"] = disposition.value
+        if format is not None:
+            body["format"] = format.value
+        if on_wait_timeout is not None:
+            body["on_wait_timeout"] = on_wait_timeout.value
+        if parameters is not None:
+            body["parameters"] = [v.as_dict() for v in parameters]
+        if row_limit is not None:
+            body["row_limit"] = row_limit
+        if schema is not None:
+            body["schema"] = schema
+        if statement is not None:
+            body["statement"] = statement
+        if wait_timeout is not None:
+            body["wait_timeout"] = wait_timeout
+        if warehouse_id is not None:
+            body["warehouse_id"] = warehouse_id
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/sql/statements/", body=body, headers=headers)
         return StatementResponse.from_dict(res)
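
A sketch of the named-parameter pattern from the `parameters` description above, using `StatementParameterListItem` from this module; the warehouse ID is a placeholder and `w` is a `WorkspaceClient` as in the earlier sketches:

```
from databricks.sdk.service.sql import StatementParameterListItem

resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
    warehouse_id="<warehouse-id>",
    parameters=[
        StatementParameterListItem(name="my_name", value="the name"),
        StatementParameterListItem(name="my_date", value="2020-01-01", type="DATE"),
    ],
)
```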
 
     def get_statement(self, statement_id: str) -> StatementResponse:
         """Get status, manifest, and result first chunk.
-        
+
         This request can be used to poll for the statement's status. When the `status.state` field is
         `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the
         statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the
         state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and
         further calls will receive an HTTP 404 response.
-        
+
         **NOTE** This call currently might take up to 5 seconds to get the latest status and result.
-        
+
         :param statement_id: str
           The statement ID is returned upon successfully submitting a SQL statement, and is a required
           reference for all subsequent calls.
-        
+
         :returns: :class:`StatementResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/sql/statements/{statement_id}", headers=headers)
         return StatementResponse.from_dict(res)
 
     def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData:
         """Get result chunk by index.
-        
+
         After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index.
         Whereas the first chunk with `chunk_index=0` is typically fetched with
         :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request
         can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
         element described in the :method:statementexecution/getStatement request, and similarly includes the
         `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
-        
+
         :param statement_id: str
           The statement ID is returned upon successfully submitting a SQL statement, and is a required
           reference for all subsequent calls.
         :param chunk_index: int
-        
+
         :returns: :class:`ResultData`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}",
+            headers=headers,
+        )
         return ResultData.from_dict(res)
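
A sketch of the sequential chunk fetching described above, following `next_chunk_index` until it is absent; the `data_array`/`external_links` attribute names on `ResultData` are assumptions, and `w` is a `WorkspaceClient` as before:

```
resp = w.statement_execution.get_statement("<statement-id>")  # placeholder ID
chunk = resp.result
while chunk is not None:
    # Process chunk.data_array (INLINE) or chunk.external_links (EXTERNAL_LINKS) here.
    if chunk.next_chunk_index is None:
        break
    chunk = w.statement_execution.get_statement_result_chunk_n(
        "<statement-id>", chunk.next_chunk_index
    )
```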
 
 
 class WarehousesAPI:
     """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks
-    SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud."""
+    SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_warehouse_running(
-            self,
-            id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse:
+        self,
+        id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[GetWarehouseResponse], None]] = None,
+    ) -> GetWarehouseResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (State.RUNNING, )
-        failure_states = (State.STOPPED, State.DELETED, )
-        status_message = 'polling...'
+        target_states = (State.RUNNING,)
+        failure_states = (
+            State.STOPPED,
+            State.DELETED,
+        )
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(id=id)
             status = poll.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.health:
                 status_message = poll.health.summary
             if status in target_states:
@@ -8133,31 +9831,32 @@ def wait_get_warehouse_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f"failed to reach RUNNING, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"id={id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def wait_get_warehouse_stopped(
-            self,
-            id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse:
+        self,
+        id: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[GetWarehouseResponse], None]] = None,
+    ) -> GetWarehouseResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (State.STOPPED, )
-        status_message = 'polling...'
+        target_states = (State.STOPPED,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get(id=id)
             status = poll.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.health:
                 status_message = poll.health.summary
             if status in target_states:
@@ -8169,39 +9868,39 @@ def wait_get_warehouse_stopped(
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def create(
-            self,
-            *,
-            auto_stop_mins: Optional[int] = None,
-            channel: Optional[Channel] = None,
-            cluster_size: Optional[str] = None,
-            creator_name: Optional[str] = None,
-            enable_photon: Optional[bool] = None,
-            enable_serverless_compute: Optional[bool] = None,
-            instance_profile_arn: Optional[str] = None,
-            max_num_clusters: Optional[int] = None,
-            min_num_clusters: Optional[int] = None,
-            name: Optional[str] = None,
-            spot_instance_policy: Optional[SpotInstancePolicy] = None,
-            tags: Optional[EndpointTags] = None,
-            warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None
+        self,
+        *,
+        auto_stop_mins: Optional[int] = None,
+        channel: Optional[Channel] = None,
+        cluster_size: Optional[str] = None,
+        creator_name: Optional[str] = None,
+        enable_photon: Optional[bool] = None,
+        enable_serverless_compute: Optional[bool] = None,
+        instance_profile_arn: Optional[str] = None,
+        max_num_clusters: Optional[int] = None,
+        min_num_clusters: Optional[int] = None,
+        name: Optional[str] = None,
+        spot_instance_policy: Optional[SpotInstancePolicy] = None,
+        tags: Optional[EndpointTags] = None,
+        warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None,
     ) -> Wait[GetWarehouseResponse]:
         """Create a warehouse.
-        
+
         Creates a new SQL warehouse.
-        
+
         :param auto_stop_mins: int (optional)
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
-          
+
           Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
           non-serverless warehouses - 0 indicates no autostop.
-          
+
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
           Channel Details
@@ -8209,14 +9908,14 @@ def create(
           Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
           to run larger queries on it. If you want to increase the number of concurrent queries, please tune
           max_num_clusters.
-          
+
           Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
           4X-Large
         :param creator_name: str (optional)
           warehouse creator name
         :param enable_photon: bool (optional)
           Configures whether the warehouse should use Photon optimized clusters.
-          
+
           Defaults to false.
         :param enable_serverless_compute: bool (optional)
           Configures whether the warehouse should use serverless compute
@@ -8224,58 +9923,75 @@ def create(
           Deprecated. Instance profile used to pass IAM role to the cluster
         :param max_num_clusters: int (optional)
           Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-          
+
           Supported values: - Must be >= min_num_clusters - Must be <= 30.
-          
+
           Defaults to min_clusters if unset.
         :param min_num_clusters: int (optional)
           Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
           will ensure that a larger number of clusters are always running and therefore may reduce the cold
           start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-          
+
           Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-          
+
           Defaults to 1
         :param name: str (optional)
           Logical name for the cluster.
-          
+
           Supported values: - Must be unique within an org. - Must be less than 100 characters.
         :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
           Configures whether the warehouse should use spot instances.
         :param tags: :class:`EndpointTags` (optional)
           A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
           associated with this SQL warehouse.
-          
+
           Supported values: - Number of tags < 45.
         :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
           Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
           also set the field `enable_serverless_compute` to `true`.
-        
+
         :returns:
           Long-running operation waiter for :class:`GetWarehouseResponse`.
           See :method:wait_get_warehouse_running for more details.
         """
         body = {}
-        if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins
-        if channel is not None: body['channel'] = channel.as_dict()
-        if cluster_size is not None: body['cluster_size'] = cluster_size
-        if creator_name is not None: body['creator_name'] = creator_name
-        if enable_photon is not None: body['enable_photon'] = enable_photon
+        if auto_stop_mins is not None:
+            body["auto_stop_mins"] = auto_stop_mins
+        if channel is not None:
+            body["channel"] = channel.as_dict()
+        if cluster_size is not None:
+            body["cluster_size"] = cluster_size
+        if creator_name is not None:
+            body["creator_name"] = creator_name
+        if enable_photon is not None:
+            body["enable_photon"] = enable_photon
         if enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = enable_serverless_compute
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters
-        if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters
-        if name is not None: body['name'] = name
-        if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value
-        if tags is not None: body['tags'] = tags.as_dict()
-        if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/sql/warehouses', body=body, headers=headers)
-        return Wait(self.wait_get_warehouse_running,
-                    response=CreateWarehouseResponse.from_dict(op_response),
-                    id=op_response['id'])
+            body["enable_serverless_compute"] = enable_serverless_compute
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        if max_num_clusters is not None:
+            body["max_num_clusters"] = max_num_clusters
+        if min_num_clusters is not None:
+            body["min_num_clusters"] = min_num_clusters
+        if name is not None:
+            body["name"] = name
+        if spot_instance_policy is not None:
+            body["spot_instance_policy"] = spot_instance_policy.value
+        if tags is not None:
+            body["tags"] = tags.as_dict()
+        if warehouse_type is not None:
+            body["warehouse_type"] = warehouse_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do("POST", "/api/2.0/sql/warehouses", body=body, headers=headers)
+        return Wait(
+            self.wait_get_warehouse_running,
+            response=CreateWarehouseResponse.from_dict(op_response),
+            id=op_response["id"],
+        )
 
     def create_and_wait(
         self,
@@ -8293,66 +10009,71 @@ def create_and_wait(
         spot_instance_policy: Optional[SpotInstancePolicy] = None,
         tags: Optional[EndpointTags] = None,
         warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None,
-        timeout=timedelta(minutes=20)
+        timeout=timedelta(minutes=20),
     ) -> GetWarehouseResponse:
-        return self.create(auto_stop_mins=auto_stop_mins,
-                           channel=channel,
-                           cluster_size=cluster_size,
-                           creator_name=creator_name,
-                           enable_photon=enable_photon,
-                           enable_serverless_compute=enable_serverless_compute,
-                           instance_profile_arn=instance_profile_arn,
-                           max_num_clusters=max_num_clusters,
-                           min_num_clusters=min_num_clusters,
-                           name=name,
-                           spot_instance_policy=spot_instance_policy,
-                           tags=tags,
-                           warehouse_type=warehouse_type).result(timeout=timeout)
+        return self.create(
+            auto_stop_mins=auto_stop_mins,
+            channel=channel,
+            cluster_size=cluster_size,
+            creator_name=creator_name,
+            enable_photon=enable_photon,
+            enable_serverless_compute=enable_serverless_compute,
+            instance_profile_arn=instance_profile_arn,
+            max_num_clusters=max_num_clusters,
+            min_num_clusters=min_num_clusters,
+            name=name,
+            spot_instance_policy=spot_instance_policy,
+            tags=tags,
+            warehouse_type=warehouse_type,
+        ).result(timeout=timeout)
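
A minimal usage sketch of the blocking create path above, assuming a `WorkspaceClient` with default authentication; the warehouse name, size, and timeout below are illustrative placeholders:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # resolves auth from the environment or ~/.databrickscfg

# create_and_wait() wraps create().result(): it submits the request and then
# polls wait_get_warehouse_running until the warehouse is usable or the timeout expires.
created = w.warehouses.create_and_wait(
    name="sdk-example-warehouse",  # placeholder name
    cluster_size="2X-Small",
    max_num_clusters=1,
    auto_stop_mins=10,
    enable_serverless_compute=True,
    timeout=timedelta(minutes=20),
)
print(created.id)

# Clean up when done.
w.warehouses.delete(id=created.id)
```
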
 
     def delete(self, id: str):
         """Delete a warehouse.
-        
+
         Deletes a SQL warehouse.
-        
+
         :param id: str
           Required. Id of the SQL warehouse.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/sql/warehouses/{id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/sql/warehouses/{id}", headers=headers)
 
     def edit(
-            self,
-            id: str,
-            *,
-            auto_stop_mins: Optional[int] = None,
-            channel: Optional[Channel] = None,
-            cluster_size: Optional[str] = None,
-            creator_name: Optional[str] = None,
-            enable_photon: Optional[bool] = None,
-            enable_serverless_compute: Optional[bool] = None,
-            instance_profile_arn: Optional[str] = None,
-            max_num_clusters: Optional[int] = None,
-            min_num_clusters: Optional[int] = None,
-            name: Optional[str] = None,
-            spot_instance_policy: Optional[SpotInstancePolicy] = None,
-            tags: Optional[EndpointTags] = None,
-            warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None) -> Wait[GetWarehouseResponse]:
+        self,
+        id: str,
+        *,
+        auto_stop_mins: Optional[int] = None,
+        channel: Optional[Channel] = None,
+        cluster_size: Optional[str] = None,
+        creator_name: Optional[str] = None,
+        enable_photon: Optional[bool] = None,
+        enable_serverless_compute: Optional[bool] = None,
+        instance_profile_arn: Optional[str] = None,
+        max_num_clusters: Optional[int] = None,
+        min_num_clusters: Optional[int] = None,
+        name: Optional[str] = None,
+        spot_instance_policy: Optional[SpotInstancePolicy] = None,
+        tags: Optional[EndpointTags] = None,
+        warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None,
+    ) -> Wait[GetWarehouseResponse]:
         """Update a warehouse.
-        
+
         Updates the configuration for a SQL warehouse.
-        
+
         :param id: str
           Required. Id of the warehouse to configure.
         :param auto_stop_mins: int (optional)
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
-          
+
           Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
-          
+
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
           Channel Details
@@ -8360,14 +10081,14 @@ def edit(
           Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
           to run larger queries on it. If you want to increase the number of concurrent queries, please tune
           max_num_clusters.
-          
+
           Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
           4X-Large
         :param creator_name: str (optional)
           warehouse creator name
         :param enable_photon: bool (optional)
           Configures whether the warehouse should use Photon optimized clusters.
-          
+
           Defaults to false.
         :param enable_serverless_compute: bool (optional)
           Configures whether the warehouse should use serverless compute.
@@ -8375,58 +10096,80 @@ def edit(
           Deprecated. Instance profile used to pass IAM role to the cluster
         :param max_num_clusters: int (optional)
           Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-          
+
           Supported values: - Must be >= min_num_clusters - Must be <= 30.
-          
+
           Defaults to min_clusters if unset.
         :param min_num_clusters: int (optional)
           Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
           will ensure that a larger number of clusters are always running and therefore may reduce the cold
           start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-          
+
           Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-          
+
           Defaults to 1
         :param name: str (optional)
           Logical name for the cluster.
-          
+
           Supported values: - Must be unique within an org. - Must be less than 100 characters.
         :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
           Configures whether the warehouse should use spot instances.
         :param tags: :class:`EndpointTags` (optional)
           A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
           associated with this SQL warehouse.
-          
+
           Supported values: - Number of tags < 45.
         :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
           Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set this to `PRO` and
           also set the field `enable_serverless_compute` to `true`.
-        
+
         :returns:
           Long-running operation waiter for :class:`GetWarehouseResponse`.
           See :method:wait_get_warehouse_running for more details.
         """
         body = {}
-        if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins
-        if channel is not None: body['channel'] = channel.as_dict()
-        if cluster_size is not None: body['cluster_size'] = cluster_size
-        if creator_name is not None: body['creator_name'] = creator_name
-        if enable_photon is not None: body['enable_photon'] = enable_photon
+        if auto_stop_mins is not None:
+            body["auto_stop_mins"] = auto_stop_mins
+        if channel is not None:
+            body["channel"] = channel.as_dict()
+        if cluster_size is not None:
+            body["cluster_size"] = cluster_size
+        if creator_name is not None:
+            body["creator_name"] = creator_name
+        if enable_photon is not None:
+            body["enable_photon"] = enable_photon
         if enable_serverless_compute is not None:
-            body['enable_serverless_compute'] = enable_serverless_compute
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters
-        if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters
-        if name is not None: body['name'] = name
-        if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value
-        if tags is not None: body['tags'] = tags.as_dict()
-        if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', f'/api/2.0/sql/warehouses/{id}/edit', body=body, headers=headers)
-        return Wait(self.wait_get_warehouse_running,
-                    response=EditWarehouseResponse.from_dict(op_response),
-                    id=id)
+            body["enable_serverless_compute"] = enable_serverless_compute
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        if max_num_clusters is not None:
+            body["max_num_clusters"] = max_num_clusters
+        if min_num_clusters is not None:
+            body["min_num_clusters"] = min_num_clusters
+        if name is not None:
+            body["name"] = name
+        if spot_instance_policy is not None:
+            body["spot_instance_policy"] = spot_instance_policy.value
+        if tags is not None:
+            body["tags"] = tags.as_dict()
+        if warehouse_type is not None:
+            body["warehouse_type"] = warehouse_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "POST",
+            f"/api/2.0/sql/warehouses/{id}/edit",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_warehouse_running,
+            response=EditWarehouseResponse.from_dict(op_response),
+            id=id,
+        )
 
     def edit_and_wait(
         self,
@@ -8445,150 +10188,178 @@ def edit_and_wait(
         spot_instance_policy: Optional[SpotInstancePolicy] = None,
         tags: Optional[EndpointTags] = None,
         warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None,
-        timeout=timedelta(minutes=20)
+        timeout=timedelta(minutes=20),
     ) -> GetWarehouseResponse:
-        return self.edit(auto_stop_mins=auto_stop_mins,
-                         channel=channel,
-                         cluster_size=cluster_size,
-                         creator_name=creator_name,
-                         enable_photon=enable_photon,
-                         enable_serverless_compute=enable_serverless_compute,
-                         id=id,
-                         instance_profile_arn=instance_profile_arn,
-                         max_num_clusters=max_num_clusters,
-                         min_num_clusters=min_num_clusters,
-                         name=name,
-                         spot_instance_policy=spot_instance_policy,
-                         tags=tags,
-                         warehouse_type=warehouse_type).result(timeout=timeout)
+        return self.edit(
+            auto_stop_mins=auto_stop_mins,
+            channel=channel,
+            cluster_size=cluster_size,
+            creator_name=creator_name,
+            enable_photon=enable_photon,
+            enable_serverless_compute=enable_serverless_compute,
+            id=id,
+            instance_profile_arn=instance_profile_arn,
+            max_num_clusters=max_num_clusters,
+            min_num_clusters=min_num_clusters,
+            name=name,
+            spot_instance_policy=spot_instance_policy,
+            tags=tags,
+            warehouse_type=warehouse_type,
+        ).result(timeout=timeout)
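
The corresponding blocking edit path, again assuming a configured `WorkspaceClient`; the warehouse id is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# edit_and_wait() posts to /api/2.0/sql/warehouses/{id}/edit and then waits for
# the warehouse to be running again before returning the refreshed state.
updated = w.warehouses.edit_and_wait(
    id="<warehouse-id>",  # placeholder
    name="sdk-example-warehouse-v2",
    cluster_size="X-Small",
    min_num_clusters=1,
    max_num_clusters=2,
    auto_stop_mins=30,
)
print(updated.name)
```
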
 
     def get(self, id: str) -> GetWarehouseResponse:
         """Get warehouse info.
-        
+
         Gets the information for a single SQL warehouse.
-        
+
         :param id: str
           Required. Id of the SQL warehouse.
-        
+
         :returns: :class:`GetWarehouseResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/sql/warehouses/{id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/sql/warehouses/{id}", headers=headers)
         return GetWarehouseResponse.from_dict(res)
 
     def get_permission_levels(self, warehouse_id: str) -> GetWarehousePermissionLevelsResponse:
         """Get SQL warehouse permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
-        
+
         :returns: :class:`GetWarehousePermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels",
+            headers=headers,
+        )
         return GetWarehousePermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, warehouse_id: str) -> WarehousePermissions:
         """Get SQL warehouse permissions.
-        
+
         Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root
         object.
-        
+
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
-        
+
         :returns: :class:`WarehousePermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/warehouses/{warehouse_id}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/warehouses/{warehouse_id}",
+            headers=headers,
+        )
         return WarehousePermissions.from_dict(res)
 
-    def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse:
+    def get_workspace_warehouse_config(
+        self,
+    ) -> GetWorkspaceWarehouseConfigResponse:
         """Get the workspace configuration.
-        
+
         Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-        
+
         :returns: :class:`GetWorkspaceWarehouseConfigResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/sql/config/warehouses', headers=headers)
+        res = self._api.do("GET", "/api/2.0/sql/config/warehouses", headers=headers)
         return GetWorkspaceWarehouseConfigResponse.from_dict(res)
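
A quick sketch of the two read-only calls above, assuming a configured `WorkspaceClient`; the warehouse id is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

info = w.warehouses.get(id="<warehouse-id>")             # single-warehouse details
config = w.warehouses.get_workspace_warehouse_config()   # workspace-wide settings
```
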
 
     def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]:
         """List warehouses.
-        
+
         Lists all SQL warehouses that a user has manager permissions on.
-        
+
         :param run_as_user_id: int (optional)
           Service Principal which will be used to fetch the list of warehouses. If not specified, the user
           from the session header is used.
-        
+
         :returns: Iterator over :class:`EndpointInfo`
         """
 
         query = {}
-        if run_as_user_id is not None: query['run_as_user_id'] = run_as_user_id
-        headers = {'Accept': 'application/json', }
+        if run_as_user_id is not None:
+            query["run_as_user_id"] = run_as_user_id
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/sql/warehouses', query=query, headers=headers)
+        json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers)
         parsed = ListWarehousesResponse.from_dict(json).warehouses
         return parsed if parsed is not None else []
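
Because `list()` already unwraps `ListWarehousesResponse` (falling back to an empty list), the result can be iterated directly; a short sketch, assuming `EndpointInfo` carries the usual `id` and `name` fields:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for warehouse in w.warehouses.list():
    # id/name are assumed attributes of sql.EndpointInfo
    print(warehouse.id, warehouse.name)
```
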
 
-    def set_permissions(self,
-                        warehouse_id: str,
-                        *,
-                        access_control_list: Optional[List[WarehouseAccessControlRequest]] = None
-                        ) -> WarehousePermissions:
+    def set_permissions(
+        self,
+        warehouse_id: str,
+        *,
+        access_control_list: Optional[List[WarehouseAccessControlRequest]] = None,
+    ) -> WarehousePermissions:
         """Set SQL warehouse permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
         :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-        
+
         :returns: :class:`WarehousePermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/warehouses/{warehouse_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/warehouses/{warehouse_id}",
+            body=body,
+            headers=headers,
+        )
         return WarehousePermissions.from_dict(res)
 
     def set_workspace_warehouse_config(
-            self,
-            *,
-            channel: Optional[Channel] = None,
-            config_param: Optional[RepeatedEndpointConfPairs] = None,
-            data_access_config: Optional[List[EndpointConfPair]] = None,
-            enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None,
-            global_param: Optional[RepeatedEndpointConfPairs] = None,
-            google_service_account: Optional[str] = None,
-            instance_profile_arn: Optional[str] = None,
-            security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None,
-            sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None):
+        self,
+        *,
+        channel: Optional[Channel] = None,
+        config_param: Optional[RepeatedEndpointConfPairs] = None,
+        data_access_config: Optional[List[EndpointConfPair]] = None,
+        enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None,
+        global_param: Optional[RepeatedEndpointConfPairs] = None,
+        google_service_account: Optional[str] = None,
+        instance_profile_arn: Optional[str] = None,
+        security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None,
+        sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None,
+    ):
         """Set the workspace configuration.
-        
+
         Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-        
+
         :param channel: :class:`Channel` (optional)
           Optional: Channel selection details
         :param config_param: :class:`RepeatedEndpointConfPairs` (optional)
@@ -8611,95 +10382,118 @@ def set_workspace_warehouse_config(
           Security policy for warehouses
         :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
           SQL configuration parameters
-        
-        
+
+
         """
         body = {}
-        if channel is not None: body['channel'] = channel.as_dict()
-        if config_param is not None: body['config_param'] = config_param.as_dict()
+        if channel is not None:
+            body["channel"] = channel.as_dict()
+        if config_param is not None:
+            body["config_param"] = config_param.as_dict()
         if data_access_config is not None:
-            body['data_access_config'] = [v.as_dict() for v in data_access_config]
+            body["data_access_config"] = [v.as_dict() for v in data_access_config]
         if enabled_warehouse_types is not None:
-            body['enabled_warehouse_types'] = [v.as_dict() for v in enabled_warehouse_types]
-        if global_param is not None: body['global_param'] = global_param.as_dict()
-        if google_service_account is not None: body['google_service_account'] = google_service_account
-        if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn
-        if security_policy is not None: body['security_policy'] = security_policy.value
+            body["enabled_warehouse_types"] = [v.as_dict() for v in enabled_warehouse_types]
+        if global_param is not None:
+            body["global_param"] = global_param.as_dict()
+        if google_service_account is not None:
+            body["google_service_account"] = google_service_account
+        if instance_profile_arn is not None:
+            body["instance_profile_arn"] = instance_profile_arn
+        if security_policy is not None:
+            body["security_policy"] = security_policy.value
         if sql_configuration_parameters is not None:
-            body['sql_configuration_parameters'] = sql_configuration_parameters.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["sql_configuration_parameters"] = sql_configuration_parameters.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PUT', '/api/2.0/sql/config/warehouses', body=body, headers=headers)
+        self._api.do("PUT", "/api/2.0/sql/config/warehouses", body=body, headers=headers)
 
     def start(self, id: str) -> Wait[GetWarehouseResponse]:
         """Start a warehouse.
-        
+
         Starts a SQL warehouse.
-        
+
         :param id: str
           Required. Id of the SQL warehouse.
-        
+
         :returns:
           Long-running operation waiter for :class:`GetWarehouseResponse`.
           See :method:wait_get_warehouse_running for more details.
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        op_response = self._api.do('POST', f'/api/2.0/sql/warehouses/{id}/start', headers=headers)
-        return Wait(self.wait_get_warehouse_running,
-                    response=StartWarehouseResponse.from_dict(op_response),
-                    id=id)
+        op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/start", headers=headers)
+        return Wait(
+            self.wait_get_warehouse_running,
+            response=StartWarehouseResponse.from_dict(op_response),
+            id=id,
+        )
 
     def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse:
         return self.start(id=id).result(timeout=timeout)
 
     def stop(self, id: str) -> Wait[GetWarehouseResponse]:
         """Stop a warehouse.
-        
+
         Stops a SQL warehouse.
-        
+
         :param id: str
           Required. Id of the SQL warehouse.
-        
+
         :returns:
           Long-running operation waiter for :class:`GetWarehouseResponse`.
           See :method:wait_get_warehouse_stopped for more details.
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        op_response = self._api.do('POST', f'/api/2.0/sql/warehouses/{id}/stop', headers=headers)
-        return Wait(self.wait_get_warehouse_stopped,
-                    response=StopWarehouseResponse.from_dict(op_response),
-                    id=id)
+        op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/stop", headers=headers)
+        return Wait(
+            self.wait_get_warehouse_stopped,
+            response=StopWarehouseResponse.from_dict(op_response),
+            id=id,
+        )
 
     def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse:
         return self.stop(id=id).result(timeout=timeout)
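
The start/stop pair follows the same long-running-operation pattern: the base call returns a `Wait` immediately, while the `_and_wait` wrapper blocks on `result()`. A sketch with a placeholder id:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
warehouse_id = "<warehouse-id>"  # placeholder

# Non-blocking: returns a Wait bound to wait_get_warehouse_running.
waiter = w.warehouses.start(id=warehouse_id)
running = waiter.result(timeout=timedelta(minutes=20))

# Blocking convenience wrapper; polls wait_get_warehouse_stopped under the hood.
stopped = w.warehouses.stop_and_wait(id=warehouse_id)
```
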
 
-    def update_permissions(self,
-                           warehouse_id: str,
-                           *,
-                           access_control_list: Optional[List[WarehouseAccessControlRequest]] = None
-                           ) -> WarehousePermissions:
+    def update_permissions(
+        self,
+        warehouse_id: str,
+        *,
+        access_control_list: Optional[List[WarehouseAccessControlRequest]] = None,
+    ) -> WarehousePermissions:
         """Update SQL warehouse permissions.
-        
+
         Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
         object.
-        
+
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
         :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-        
+
         :returns: :class:`WarehousePermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/warehouses/{warehouse_id}',
-                           body=body,
-                           headers=headers)
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/warehouses/{warehouse_id}",
+            body=body,
+            headers=headers,
+        )
         return WarehousePermissions.from_dict(res)
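
To round off the warehouse permission calls, a sketch of patching an ACL; the `user_name`/`permission_level` fields and the `WarehousePermissionLevel` enum are assumed from the surrounding `sql` service module:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# update_permissions() PATCHes the ACL in place; set_permissions() PUTs a full replacement.
perms = w.warehouses.update_permissions(
    warehouse_id="<warehouse-id>",  # placeholder
    access_control_list=[
        sql.WarehouseAccessControlRequest(
            user_name="someone@example.com",                        # assumed field
            permission_level=sql.WarehousePermissionLevel.CAN_USE,  # assumed enum member
        )
    ],
)
```
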
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py
index f1e6aeaa3..126c00fc5 100755
--- a/databricks/sdk/service/vectorsearch.py
+++ b/databricks/sdk/service/vectorsearch.py
@@ -13,7 +13,7 @@
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -26,19 +26,21 @@ class ColumnInfo:
     def as_dict(self) -> dict:
         """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
-        return cls(name=d.get('name', None))
+        return cls(name=d.get("name", None))
 
 
 @dataclass
@@ -52,21 +54,28 @@ class CreateEndpoint:
     def as_dict(self) -> dict:
         """Serializes the CreateEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type.value
-        if self.name is not None: body['name'] = self.name
+        if self.endpoint_type is not None:
+            body["endpoint_type"] = self.endpoint_type.value
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
-        if self.name is not None: body['name'] = self.name
+        if self.endpoint_type is not None:
+            body["endpoint_type"] = self.endpoint_type
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateEndpoint:
         """Deserializes the CreateEndpoint from a dictionary."""
-        return cls(endpoint_type=_enum(d, 'endpoint_type', EndpointType), name=d.get('name', None))
+        return cls(
+            endpoint_type=_enum(d, "endpoint_type", EndpointType),
+            name=d.get("name", None),
+        )
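
The generated dataclasses in this file all follow the same serialization contract: `as_dict()` flattens enums to their `.value` and skips unset fields, `as_shallow_dict()` keeps the raw members, and `from_dict()` rebuilds them via `_enum`. A small round-trip sketch using `CreateEndpoint`:

```python
from databricks.sdk.service.vectorsearch import CreateEndpoint, EndpointType

req = CreateEndpoint(name="docs-endpoint", endpoint_type=EndpointType.STANDARD)

req.as_dict()          # {'endpoint_type': 'STANDARD', 'name': 'docs-endpoint'}
req.as_shallow_dict()  # {'endpoint_type': <EndpointType.STANDARD: 'STANDARD'>, 'name': 'docs-endpoint'}
CreateEndpoint.from_dict({"endpoint_type": "STANDARD", "name": "docs-endpoint"})
```
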
 
 
 @dataclass
@@ -97,37 +106,48 @@ class CreateVectorIndexRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict()
+        if self.delta_sync_index_spec:
+            body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict()
         if self.direct_access_index_spec:
-            body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict()
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
+            body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict()
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
-        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        if self.delta_sync_index_spec:
+            body["delta_sync_index_spec"] = self.delta_sync_index_spec
+        if self.direct_access_index_spec:
+            body["direct_access_index_spec"] = self.direct_access_index_spec
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexRequest:
         """Deserializes the CreateVectorIndexRequest from a dictionary."""
-        return cls(delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec',
-                                                    DeltaSyncVectorIndexSpecRequest),
-                   direct_access_index_spec=_from_dict(d, 'direct_access_index_spec',
-                                                       DirectAccessVectorIndexSpec),
-                   endpoint_name=d.get('endpoint_name', None),
-                   index_type=_enum(d, 'index_type', VectorIndexType),
-                   name=d.get('name', None),
-                   primary_key=d.get('primary_key', None))
+        return cls(
+            delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecRequest),
+            direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec),
+            endpoint_name=d.get("endpoint_name", None),
+            index_type=_enum(d, "index_type", VectorIndexType),
+            name=d.get("name", None),
+            primary_key=d.get("primary_key", None),
+        )
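
These request dataclasses compose, so an index definition can be built from the nested spec types and serialized in one go; a sketch assuming `VectorIndexType.DELTA_SYNC` is a valid member and using placeholder table and endpoint names:

```python
from databricks.sdk.service.vectorsearch import (
    CreateVectorIndexRequest,
    DeltaSyncVectorIndexSpecRequest,
    EmbeddingSourceColumn,
    VectorIndexType,
)

request = CreateVectorIndexRequest(
    name="main.docs.my_index",              # placeholder index name
    endpoint_name="docs-endpoint",          # placeholder endpoint
    primary_key="id",
    index_type=VectorIndexType.DELTA_SYNC,  # assumed enum member
    delta_sync_index_spec=DeltaSyncVectorIndexSpecRequest(
        source_table="main.docs.source",    # placeholder source table
        embedding_source_columns=[
            EmbeddingSourceColumn(
                name="text",
                embedding_model_endpoint_name="<embedding-endpoint>",  # placeholder
            )
        ],
    ),
)
body = request.as_dict()  # nested specs serialize via their own as_dict(); unset fields are omitted
```
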
 
 
 @dataclass
@@ -137,19 +157,21 @@ class CreateVectorIndexResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.vector_index: body['vector_index'] = self.vector_index.as_dict()
+        if self.vector_index:
+            body["vector_index"] = self.vector_index.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVectorIndexResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.vector_index: body['vector_index'] = self.vector_index
+        if self.vector_index:
+            body["vector_index"] = self.vector_index
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexResponse:
         """Deserializes the CreateVectorIndexResponse from a dictionary."""
-        return cls(vector_index=_from_dict(d, 'vector_index', VectorIndex))
+        return cls(vector_index=_from_dict(d, "vector_index", VectorIndex))
 
 
 @dataclass
@@ -165,30 +187,36 @@ class DeleteDataResult:
     def as_dict(self) -> dict:
         """Serializes the DeleteDataResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys]
-        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        if self.failed_primary_keys:
+            body["failed_primary_keys"] = [v for v in self.failed_primary_keys]
+        if self.success_row_count is not None:
+            body["success_row_count"] = self.success_row_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
-        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        if self.failed_primary_keys:
+            body["failed_primary_keys"] = self.failed_primary_keys
+        if self.success_row_count is not None:
+            body["success_row_count"] = self.success_row_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataResult:
         """Deserializes the DeleteDataResult from a dictionary."""
-        return cls(failed_primary_keys=d.get('failed_primary_keys', None),
-                   success_row_count=d.get('success_row_count', None))
+        return cls(
+            failed_primary_keys=d.get("failed_primary_keys", None),
+            success_row_count=d.get("success_row_count", None),
+        )
 
 
 class DeleteDataStatus(Enum):
     """Status of the delete operation."""
 
-    FAILURE = 'FAILURE'
-    PARTIAL_SUCCESS = 'PARTIAL_SUCCESS'
-    SUCCESS = 'SUCCESS'
+    FAILURE = "FAILURE"
+    PARTIAL_SUCCESS = "PARTIAL_SUCCESS"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -204,21 +232,28 @@ class DeleteDataVectorIndexRequest:
     def as_dict(self) -> dict:
         """Serializes the DeleteDataVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.primary_keys:
+            body["primary_keys"] = [v for v in self.primary_keys]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.primary_keys:
+            body["primary_keys"] = self.primary_keys
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexRequest:
         """Deserializes the DeleteDataVectorIndexRequest from a dictionary."""
-        return cls(index_name=d.get('index_name', None), primary_keys=d.get('primary_keys', None))
+        return cls(
+            index_name=d.get("index_name", None),
+            primary_keys=d.get("primary_keys", None),
+        )
 
 
 @dataclass
@@ -234,22 +269,28 @@ class DeleteDataVectorIndexResponse:
     def as_dict(self) -> dict:
         """Serializes the DeleteDataVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.result: body['result'] = self.result.as_dict()
-        if self.status is not None: body['status'] = self.status.value
+        if self.result:
+            body["result"] = self.result.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.result: body['result'] = self.result
-        if self.status is not None: body['status'] = self.status
+        if self.result:
+            body["result"] = self.result
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexResponse:
         """Deserializes the DeleteDataVectorIndexResponse from a dictionary."""
-        return cls(result=_from_dict(d, 'result', DeleteDataResult),
-                   status=_enum(d, 'status', DeleteDataStatus))
+        return cls(
+            result=_from_dict(d, "result", DeleteDataResult),
+            status=_enum(d, "status", DeleteDataStatus),
+        )
 
 
 @dataclass
@@ -322,40 +363,48 @@ class DeltaSyncVectorIndexSpecRequest:
     def as_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns_to_sync: body['columns_to_sync'] = [v for v in self.columns_to_sync]
+        if self.columns_to_sync:
+            body["columns_to_sync"] = [v for v in self.columns_to_sync]
         if self.embedding_source_columns:
-            body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
+            body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
-            body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
+            body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns]
         if self.embedding_writeback_table is not None:
-            body['embedding_writeback_table'] = self.embedding_writeback_table
-        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value
-        if self.source_table is not None: body['source_table'] = self.source_table
+            body["embedding_writeback_table"] = self.embedding_writeback_table
+        if self.pipeline_type is not None:
+            body["pipeline_type"] = self.pipeline_type.value
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync
-        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
-        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.columns_to_sync:
+            body["columns_to_sync"] = self.columns_to_sync
+        if self.embedding_source_columns:
+            body["embedding_source_columns"] = self.embedding_source_columns
+        if self.embedding_vector_columns:
+            body["embedding_vector_columns"] = self.embedding_vector_columns
         if self.embedding_writeback_table is not None:
-            body['embedding_writeback_table'] = self.embedding_writeback_table
-        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
-        if self.source_table is not None: body['source_table'] = self.source_table
+            body["embedding_writeback_table"] = self.embedding_writeback_table
+        if self.pipeline_type is not None:
+            body["pipeline_type"] = self.pipeline_type
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest:
         """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary."""
-        return cls(columns_to_sync=d.get('columns_to_sync', None),
-                   embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
-                                                           EmbeddingSourceColumn),
-                   embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
-                                                           EmbeddingVectorColumn),
-                   embedding_writeback_table=d.get('embedding_writeback_table', None),
-                   pipeline_type=_enum(d, 'pipeline_type', PipelineType),
-                   source_table=d.get('source_table', None))
+        return cls(
+            columns_to_sync=d.get("columns_to_sync", None),
+            embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn),
+            embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn),
+            embedding_writeback_table=d.get("embedding_writeback_table", None),
+            pipeline_type=_enum(d, "pipeline_type", PipelineType),
+            source_table=d.get("source_table", None),
+        )
 
 
 @dataclass
@@ -388,39 +437,47 @@ def as_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.embedding_source_columns:
-            body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
+            body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
-            body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
+            body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns]
         if self.embedding_writeback_table is not None:
-            body['embedding_writeback_table'] = self.embedding_writeback_table
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value
-        if self.source_table is not None: body['source_table'] = self.source_table
+            body["embedding_writeback_table"] = self.embedding_writeback_table
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.pipeline_type is not None:
+            body["pipeline_type"] = self.pipeline_type.value
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
-        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.embedding_source_columns:
+            body["embedding_source_columns"] = self.embedding_source_columns
+        if self.embedding_vector_columns:
+            body["embedding_vector_columns"] = self.embedding_vector_columns
         if self.embedding_writeback_table is not None:
-            body['embedding_writeback_table'] = self.embedding_writeback_table
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
-        if self.source_table is not None: body['source_table'] = self.source_table
+            body["embedding_writeback_table"] = self.embedding_writeback_table
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.pipeline_type is not None:
+            body["pipeline_type"] = self.pipeline_type
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecResponse:
         """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary."""
-        return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
-                                                           EmbeddingSourceColumn),
-                   embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
-                                                           EmbeddingVectorColumn),
-                   embedding_writeback_table=d.get('embedding_writeback_table', None),
-                   pipeline_id=d.get('pipeline_id', None),
-                   pipeline_type=_enum(d, 'pipeline_type', PipelineType),
-                   source_table=d.get('source_table', None))
+        return cls(
+            embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn),
+            embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn),
+            embedding_writeback_table=d.get("embedding_writeback_table", None),
+            pipeline_id=d.get("pipeline_id", None),
+            pipeline_type=_enum(d, "pipeline_type", PipelineType),
+            source_table=d.get("source_table", None),
+        )
 
 
 @dataclass
@@ -442,28 +499,32 @@ def as_dict(self) -> dict:
         """Serializes the DirectAccessVectorIndexSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.embedding_source_columns:
-            body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
+            body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
-            body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
-        if self.schema_json is not None: body['schema_json'] = self.schema_json
+            body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns]
+        if self.schema_json is not None:
+            body["schema_json"] = self.schema_json
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
-        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
-        if self.schema_json is not None: body['schema_json'] = self.schema_json
+        if self.embedding_source_columns:
+            body["embedding_source_columns"] = self.embedding_source_columns
+        if self.embedding_vector_columns:
+            body["embedding_vector_columns"] = self.embedding_vector_columns
+        if self.schema_json is not None:
+            body["schema_json"] = self.schema_json
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec:
         """Deserializes the DirectAccessVectorIndexSpec from a dictionary."""
-        return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
-                                                           EmbeddingSourceColumn),
-                   embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
-                                                           EmbeddingVectorColumn),
-                   schema_json=d.get('schema_json', None))
+        return cls(
+            embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn),
+            embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn),
+            schema_json=d.get("schema_json", None),
+        )
 
 
 @dataclass
@@ -478,23 +539,27 @@ def as_dict(self) -> dict:
         """Serializes the EmbeddingSourceColumn into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.embedding_model_endpoint_name is not None:
-            body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name
-        if self.name is not None: body['name'] = self.name
+            body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.embedding_model_endpoint_name is not None:
-            body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name
-        if self.name is not None: body['name'] = self.name
+            body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingSourceColumn:
         """Deserializes the EmbeddingSourceColumn from a dictionary."""
-        return cls(embedding_model_endpoint_name=d.get('embedding_model_endpoint_name', None),
-                   name=d.get('name', None))
+        return cls(
+            embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -508,21 +573,28 @@ class EmbeddingVectorColumn:
     def as_dict(self) -> dict:
         """Serializes the EmbeddingVectorColumn into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension
-        if self.name is not None: body['name'] = self.name
+        if self.embedding_dimension is not None:
+            body["embedding_dimension"] = self.embedding_dimension
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension
-        if self.name is not None: body['name'] = self.name
+        if self.embedding_dimension is not None:
+            body["embedding_dimension"] = self.embedding_dimension
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingVectorColumn:
         """Deserializes the EmbeddingVectorColumn from a dictionary."""
-        return cls(embedding_dimension=d.get('embedding_dimension', None), name=d.get('name', None))
+        return cls(
+            embedding_dimension=d.get("embedding_dimension", None),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -557,45 +629,63 @@ class EndpointInfo:
     def as_dict(self) -> dict:
         """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.endpoint_status: body['endpoint_status'] = self.endpoint_status.as_dict()
-        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type.value
-        if self.id is not None: body['id'] = self.id
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.endpoint_status:
+            body["endpoint_status"] = self.endpoint_status.as_dict()
+        if self.endpoint_type is not None:
+            body["endpoint_type"] = self.endpoint_type.value
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user
-        if self.name is not None: body['name'] = self.name
-        if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.last_updated_user is not None:
+            body["last_updated_user"] = self.last_updated_user
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_indexes is not None:
+            body["num_indexes"] = self.num_indexes
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
-        if self.creator is not None: body['creator'] = self.creator
-        if self.endpoint_status: body['endpoint_status'] = self.endpoint_status
-        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
-        if self.id is not None: body['id'] = self.id
+        if self.creation_timestamp is not None:
+            body["creation_timestamp"] = self.creation_timestamp
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.endpoint_status:
+            body["endpoint_status"] = self.endpoint_status
+        if self.endpoint_type is not None:
+            body["endpoint_type"] = self.endpoint_type
+        if self.id is not None:
+            body["id"] = self.id
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
-        if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user
-        if self.name is not None: body['name'] = self.name
-        if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
+            body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.last_updated_user is not None:
+            body["last_updated_user"] = self.last_updated_user
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_indexes is not None:
+            body["num_indexes"] = self.num_indexes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
-        return cls(creation_timestamp=d.get('creation_timestamp', None),
-                   creator=d.get('creator', None),
-                   endpoint_status=_from_dict(d, 'endpoint_status', EndpointStatus),
-                   endpoint_type=_enum(d, 'endpoint_type', EndpointType),
-                   id=d.get('id', None),
-                   last_updated_timestamp=d.get('last_updated_timestamp', None),
-                   last_updated_user=d.get('last_updated_user', None),
-                   name=d.get('name', None),
-                   num_indexes=d.get('num_indexes', None))
+        return cls(
+            creation_timestamp=d.get("creation_timestamp", None),
+            creator=d.get("creator", None),
+            endpoint_status=_from_dict(d, "endpoint_status", EndpointStatus),
+            endpoint_type=_enum(d, "endpoint_type", EndpointType),
+            id=d.get("id", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+            last_updated_user=d.get("last_updated_user", None),
+            name=d.get("name", None),
+            num_indexes=d.get("num_indexes", None),
+        )
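Every message in this module follows the same serialization contract that the reformatted hunks above make easier to read: `as_dict` emits a JSON-ready body (enums via `.value`, nested messages via their own `as_dict`), `as_shallow_dict` keeps nested objects untouched, and `from_dict` rebuilds the dataclass, coercing nested messages with `_from_dict` and enums with `_enum`. A minimal round-trip sketch, assuming the usual `databricks.sdk.service.vectorsearch` import path and purely illustrative values:

    from databricks.sdk.service.vectorsearch import (
        EndpointInfo,
        EndpointStatusState,
        EndpointType,
    )

    payload = {
        "name": "my-endpoint",
        "endpoint_type": "STANDARD",
        "endpoint_status": {"state": "ONLINE", "message": "ready"},
        "num_indexes": 2,
    }
    info = EndpointInfo.from_dict(payload)
    assert info.endpoint_type is EndpointType.STANDARD          # _enum coerced the string
    assert info.endpoint_status.state is EndpointStatusState.ONLINE
    assert info.as_dict() == payload                            # enums serialized back via .value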
 
 
 @dataclass
@@ -611,35 +701,42 @@ class EndpointStatus:
     def as_dict(self) -> dict:
         """Serializes the EndpointStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.state is not None: body['state'] = self.state
+        if self.message is not None:
+            body["message"] = self.message
+        if self.state is not None:
+            body["state"] = self.state
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointStatus:
         """Deserializes the EndpointStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', EndpointStatusState))
+        return cls(
+            message=d.get("message", None),
+            state=_enum(d, "state", EndpointStatusState),
+        )
 
 
 class EndpointStatusState(Enum):
     """Current state of the endpoint"""
 
-    OFFLINE = 'OFFLINE'
-    ONLINE = 'ONLINE'
-    PROVISIONING = 'PROVISIONING'
+    OFFLINE = "OFFLINE"
+    ONLINE = "ONLINE"
+    PROVISIONING = "PROVISIONING"
 
 
 class EndpointType(Enum):
     """Type of endpoint."""
 
-    STANDARD = 'STANDARD'
+    STANDARD = "STANDARD"
 
 
 @dataclass
@@ -654,22 +751,28 @@ class ListEndpointResponse:
     def as_dict(self) -> dict:
         """Serializes the ListEndpointResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.endpoints:
+            body["endpoints"] = [v.as_dict() for v in self.endpoints]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoints: body['endpoints'] = self.endpoints
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.endpoints:
+            body["endpoints"] = self.endpoints
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointResponse:
         """Deserializes the ListEndpointResponse from a dictionary."""
-        return cls(endpoints=_repeated_dict(d, 'endpoints', EndpointInfo),
-                   next_page_token=d.get('next_page_token', None))
+        return cls(
+            endpoints=_repeated_dict(d, "endpoints", EndpointInfo),
+            next_page_token=d.get("next_page_token", None),
+        )
 
 
 @dataclass
@@ -679,19 +782,21 @@ class ListValue:
     def as_dict(self) -> dict:
         """Serializes the ListValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        if self.values:
+            body["values"] = [v.as_dict() for v in self.values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values: body['values'] = self.values
+        if self.values:
+            body["values"] = self.values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListValue:
         """Deserializes the ListValue from a dictionary."""
-        return cls(values=_repeated_dict(d, 'values', Value))
+        return cls(values=_repeated_dict(d, "values", Value))
 
 
 @dataclass
@@ -705,22 +810,28 @@ class ListVectorIndexesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListVectorIndexesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.vector_indexes:
+            body["vector_indexes"] = [v.as_dict() for v in self.vector_indexes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.vector_indexes: body['vector_indexes'] = self.vector_indexes
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.vector_indexes:
+            body["vector_indexes"] = self.vector_indexes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVectorIndexesResponse:
         """Deserializes the ListVectorIndexesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None),
-                   vector_indexes=_repeated_dict(d, 'vector_indexes', MiniVectorIndex))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            vector_indexes=_repeated_dict(d, "vector_indexes", MiniVectorIndex),
+        )
 
 
 @dataclass
@@ -736,21 +847,25 @@ class MapStringValueEntry:
     def as_dict(self) -> dict:
         """Serializes the MapStringValueEntry into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value: body['value'] = self.value.as_dict()
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value:
+            body["value"] = self.value.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MapStringValueEntry:
         """Deserializes the MapStringValueEntry from a dictionary."""
-        return cls(key=d.get('key', None), value=_from_dict(d, 'value', Value))
+        return cls(key=d.get("key", None), value=_from_dict(d, "value", Value))
 
 
 @dataclass
@@ -778,44 +893,56 @@ class MiniVectorIndex:
     def as_dict(self) -> dict:
         """Serializes the MiniVectorIndex into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creator is not None: body['creator'] = self.creator
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creator is not None: body['creator'] = self.creator
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MiniVectorIndex:
         """Deserializes the MiniVectorIndex from a dictionary."""
-        return cls(creator=d.get('creator', None),
-                   endpoint_name=d.get('endpoint_name', None),
-                   index_type=_enum(d, 'index_type', VectorIndexType),
-                   name=d.get('name', None),
-                   primary_key=d.get('primary_key', None))
+        return cls(
+            creator=d.get("creator", None),
+            endpoint_name=d.get("endpoint_name", None),
+            index_type=_enum(d, "index_type", VectorIndexType),
+            name=d.get("name", None),
+            primary_key=d.get("primary_key", None),
+        )
 
 
 class PipelineType(Enum):
     """Pipeline execution mode.
-    
+
     - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing
     after successfully refreshing the source table in the pipeline once, ensuring the table is
     updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline
     uses continuous execution, the pipeline processes new data as it arrives in the source table to
     keep vector index fresh."""
 
-    CONTINUOUS = 'CONTINUOUS'
-    TRIGGERED = 'TRIGGERED'
+    CONTINUOUS = "CONTINUOUS"
+    TRIGGERED = "TRIGGERED"
 
 
 @dataclass
@@ -834,25 +961,33 @@ class QueryVectorIndexNextPageRequest:
     def as_dict(self) -> dict:
         """Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.page_token is not None:
+            body["page_token"] = self.page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexNextPageRequest:
         """Deserializes the QueryVectorIndexNextPageRequest from a dictionary."""
-        return cls(endpoint_name=d.get('endpoint_name', None),
-                   index_name=d.get('index_name', None),
-                   page_token=d.get('page_token', None))
+        return cls(
+            endpoint_name=d.get("endpoint_name", None),
+            index_name=d.get("index_name", None),
+            page_token=d.get("page_token", None),
+        )
 
 
 @dataclass
@@ -889,40 +1024,58 @@ class QueryVectorIndexRequest:
     def as_dict(self) -> dict:
         """Serializes the QueryVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.columns: body['columns'] = [v for v in self.columns]
-        if self.filters_json is not None: body['filters_json'] = self.filters_json
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.num_results is not None: body['num_results'] = self.num_results
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.query_type is not None: body['query_type'] = self.query_type
-        if self.query_vector: body['query_vector'] = [v for v in self.query_vector]
-        if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
+        if self.columns:
+            body["columns"] = [v for v in self.columns]
+        if self.filters_json is not None:
+            body["filters_json"] = self.filters_json
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.num_results is not None:
+            body["num_results"] = self.num_results
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.query_type is not None:
+            body["query_type"] = self.query_type
+        if self.query_vector:
+            body["query_vector"] = [v for v in self.query_vector]
+        if self.score_threshold is not None:
+            body["score_threshold"] = self.score_threshold
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.columns: body['columns'] = self.columns
-        if self.filters_json is not None: body['filters_json'] = self.filters_json
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.num_results is not None: body['num_results'] = self.num_results
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.query_type is not None: body['query_type'] = self.query_type
-        if self.query_vector: body['query_vector'] = self.query_vector
-        if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
+        if self.columns:
+            body["columns"] = self.columns
+        if self.filters_json is not None:
+            body["filters_json"] = self.filters_json
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.num_results is not None:
+            body["num_results"] = self.num_results
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.query_type is not None:
+            body["query_type"] = self.query_type
+        if self.query_vector:
+            body["query_vector"] = self.query_vector
+        if self.score_threshold is not None:
+            body["score_threshold"] = self.score_threshold
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexRequest:
         """Deserializes the QueryVectorIndexRequest from a dictionary."""
-        return cls(columns=d.get('columns', None),
-                   filters_json=d.get('filters_json', None),
-                   index_name=d.get('index_name', None),
-                   num_results=d.get('num_results', None),
-                   query_text=d.get('query_text', None),
-                   query_type=d.get('query_type', None),
-                   query_vector=d.get('query_vector', None),
-                   score_threshold=d.get('score_threshold', None))
+        return cls(
+            columns=d.get("columns", None),
+            filters_json=d.get("filters_json", None),
+            index_name=d.get("index_name", None),
+            num_results=d.get("num_results", None),
+            query_text=d.get("query_text", None),
+            query_type=d.get("query_type", None),
+            query_vector=d.get("query_vector", None),
+            score_threshold=d.get("score_threshold", None),
+        )
 
 
 @dataclass
@@ -941,25 +1094,33 @@ class QueryVectorIndexResponse:
     def as_dict(self) -> dict:
         """Serializes the QueryVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.manifest: body['manifest'] = self.manifest.as_dict()
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.result: body['result'] = self.result.as_dict()
+        if self.manifest:
+            body["manifest"] = self.manifest.as_dict()
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.result:
+            body["result"] = self.result.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.manifest: body['manifest'] = self.manifest
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.result: body['result'] = self.result
+        if self.manifest:
+            body["manifest"] = self.manifest
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.result:
+            body["result"] = self.result
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexResponse:
         """Deserializes the QueryVectorIndexResponse from a dictionary."""
-        return cls(manifest=_from_dict(d, 'manifest', ResultManifest),
-                   next_page_token=d.get('next_page_token', None),
-                   result=_from_dict(d, 'result', ResultData))
+        return cls(
+            manifest=_from_dict(d, "manifest", ResultManifest),
+            next_page_token=d.get("next_page_token", None),
+            result=_from_dict(d, "result", ResultData),
+        )
 
 
 @dataclass
@@ -975,21 +1136,28 @@ class ResultData:
     def as_dict(self) -> dict:
         """Serializes the ResultData into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_array: body['data_array'] = [v for v in self.data_array]
-        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.data_array:
+            body["data_array"] = [v for v in self.data_array]
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_array: body['data_array'] = self.data_array
-        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.data_array:
+            body["data_array"] = self.data_array
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
-        return cls(data_array=d.get('data_array', None), row_count=d.get('row_count', None))
+        return cls(
+            data_array=d.get("data_array", None),
+            row_count=d.get("row_count", None),
+        )
 
 
 @dataclass
@@ -1005,21 +1173,28 @@ class ResultManifest:
     def as_dict(self) -> dict:
         """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.column_count is not None: body['column_count'] = self.column_count
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.column_count is not None:
+            body["column_count"] = self.column_count
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.column_count is not None: body['column_count'] = self.column_count
-        if self.columns: body['columns'] = self.columns
+        if self.column_count is not None:
+            body["column_count"] = self.column_count
+        if self.columns:
+            body["columns"] = self.columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
-        return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo))
+        return cls(
+            column_count=d.get("column_count", None),
+            columns=_repeated_dict(d, "columns", ColumnInfo),
+        )
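`QueryVectorIndexResponse` splits query output into a `ResultManifest` (column metadata) and a `ResultData` block whose `data_array` holds the row values, with `next_page_token` feeding the follow-up request modeled by `QueryVectorIndexNextPageRequest`. A hedged helper that zips the two back together, assuming each `ColumnInfo` in `manifest.columns` exposes a `name` attribute (its definition sits outside these hunks):

    from typing import Any, Dict, List

    from databricks.sdk.service.vectorsearch import QueryVectorIndexResponse

    def rows_as_dicts(resp: QueryVectorIndexResponse) -> List[Dict[str, Any]]:
        """Pair manifest column names with each row in result.data_array."""
        names = [c.name for c in resp.manifest.columns]  # assumes ColumnInfo.name
        rows = resp.result.data_array if resp.result and resp.result.data_array else []
        return [dict(zip(names, row)) for row in rows]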
 
 
 @dataclass
@@ -1038,25 +1213,33 @@ class ScanVectorIndexRequest:
     def as_dict(self) -> dict:
         """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
-        if self.num_results is not None: body['num_results'] = self.num_results
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.last_primary_key is not None:
+            body["last_primary_key"] = self.last_primary_key
+        if self.num_results is not None:
+            body["num_results"] = self.num_results
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
-        if self.num_results is not None: body['num_results'] = self.num_results
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.last_primary_key is not None:
+            body["last_primary_key"] = self.last_primary_key
+        if self.num_results is not None:
+            body["num_results"] = self.num_results
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexRequest:
         """Deserializes the ScanVectorIndexRequest from a dictionary."""
-        return cls(index_name=d.get('index_name', None),
-                   last_primary_key=d.get('last_primary_key', None),
-                   num_results=d.get('num_results', None))
+        return cls(
+            index_name=d.get("index_name", None),
+            last_primary_key=d.get("last_primary_key", None),
+            num_results=d.get("num_results", None),
+        )
 
 
 @dataclass
@@ -1072,21 +1255,28 @@ class ScanVectorIndexResponse:
     def as_dict(self) -> dict:
         """Serializes the ScanVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data: body['data'] = [v.as_dict() for v in self.data]
-        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        if self.data:
+            body["data"] = [v.as_dict() for v in self.data]
+        if self.last_primary_key is not None:
+            body["last_primary_key"] = self.last_primary_key
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data: body['data'] = self.data
-        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        if self.data:
+            body["data"] = self.data
+        if self.last_primary_key is not None:
+            body["last_primary_key"] = self.last_primary_key
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexResponse:
         """Deserializes the ScanVectorIndexResponse from a dictionary."""
-        return cls(data=_repeated_dict(d, 'data', Struct), last_primary_key=d.get('last_primary_key', None))
+        return cls(
+            data=_repeated_dict(d, "data", Struct),
+            last_primary_key=d.get("last_primary_key", None),
+        )
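`ScanVectorIndexRequest` and `ScanVectorIndexResponse` describe keyset pagination: each response reports the `last_primary_key` it stopped at, and the caller passes that value back to resume the scan. A sketch of that loop, under the assumption that the indexes client exposes a `scan_index` method whose parameters mirror the request dataclass (the method itself is outside these hunks):

    def scan_all(indexes_api, index_name: str, page_size: int = 100):
        """Yield every row of an index by following last_primary_key."""
        last_key = None
        while True:
            resp = indexes_api.scan_index(          # assumed method name
                index_name=index_name,
                last_primary_key=last_key,
                num_results=page_size,
            )
            for row in resp.data or []:
                yield row
            if not resp.data or resp.last_primary_key is None:
                return
            last_key = resp.last_primary_key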
 
 
 @dataclass
@@ -1097,19 +1287,21 @@ class Struct:
     def as_dict(self) -> dict:
         """Serializes the Struct into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fields: body['fields'] = [v.as_dict() for v in self.fields]
+        if self.fields:
+            body["fields"] = [v.as_dict() for v in self.fields]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Struct into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fields: body['fields'] = self.fields
+        if self.fields:
+            body["fields"] = self.fields
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Struct:
         """Deserializes the Struct from a dictionary."""
-        return cls(fields=_repeated_dict(d, 'fields', MapStringValueEntry))
+        return cls(fields=_repeated_dict(d, "fields", MapStringValueEntry))
 
 
 @dataclass
@@ -1144,30 +1336,36 @@ class UpsertDataResult:
     def as_dict(self) -> dict:
         """Serializes the UpsertDataResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys]
-        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        if self.failed_primary_keys:
+            body["failed_primary_keys"] = [v for v in self.failed_primary_keys]
+        if self.success_row_count is not None:
+            body["success_row_count"] = self.success_row_count
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
-        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        if self.failed_primary_keys:
+            body["failed_primary_keys"] = self.failed_primary_keys
+        if self.success_row_count is not None:
+            body["success_row_count"] = self.success_row_count
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataResult:
         """Deserializes the UpsertDataResult from a dictionary."""
-        return cls(failed_primary_keys=d.get('failed_primary_keys', None),
-                   success_row_count=d.get('success_row_count', None))
+        return cls(
+            failed_primary_keys=d.get("failed_primary_keys", None),
+            success_row_count=d.get("success_row_count", None),
+        )
 
 
 class UpsertDataStatus(Enum):
     """Status of the upsert operation."""
 
-    FAILURE = 'FAILURE'
-    PARTIAL_SUCCESS = 'PARTIAL_SUCCESS'
-    SUCCESS = 'SUCCESS'
+    FAILURE = "FAILURE"
+    PARTIAL_SUCCESS = "PARTIAL_SUCCESS"
+    SUCCESS = "SUCCESS"
 
 
 @dataclass
@@ -1183,21 +1381,28 @@ class UpsertDataVectorIndexRequest:
     def as_dict(self) -> dict:
         """Serializes the UpsertDataVectorIndexRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.inputs_json is not None:
+            body["inputs_json"] = self.inputs_json
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.index_name is not None: body['index_name'] = self.index_name
-        if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
+        if self.index_name is not None:
+            body["index_name"] = self.index_name
+        if self.inputs_json is not None:
+            body["inputs_json"] = self.inputs_json
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexRequest:
         """Deserializes the UpsertDataVectorIndexRequest from a dictionary."""
-        return cls(index_name=d.get('index_name', None), inputs_json=d.get('inputs_json', None))
+        return cls(
+            index_name=d.get("index_name", None),
+            inputs_json=d.get("inputs_json", None),
+        )
 
 
 @dataclass
@@ -1213,22 +1418,28 @@ class UpsertDataVectorIndexResponse:
     def as_dict(self) -> dict:
         """Serializes the UpsertDataVectorIndexResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.result: body['result'] = self.result.as_dict()
-        if self.status is not None: body['status'] = self.status.value
+        if self.result:
+            body["result"] = self.result.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.result: body['result'] = self.result
-        if self.status is not None: body['status'] = self.status
+        if self.result:
+            body["result"] = self.result
+        if self.status is not None:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexResponse:
         """Deserializes the UpsertDataVectorIndexResponse from a dictionary."""
-        return cls(result=_from_dict(d, 'result', UpsertDataResult),
-                   status=_enum(d, 'status', UpsertDataStatus))
+        return cls(
+            result=_from_dict(d, "result", UpsertDataResult),
+            status=_enum(d, "status", UpsertDataStatus),
+        )
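Upserts take their rows as a JSON-encoded string (`inputs_json`) rather than as structured objects, and the response surfaces partial failures through `UpsertDataStatus` and `UpsertDataResult.failed_primary_keys`. A hedged sketch of building the payload and checking the outcome, assuming a configured `WorkspaceClient` exposes this service as `w.vector_search_indexes` with an `upsert_data_vector_index` method (both names are assumptions; index and row values are illustrative):

    import json

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.vectorsearch import UpsertDataStatus

    w = WorkspaceClient()  # picks up the usual workspace auth
    rows = [
        {"id": "doc-1", "text_vector": [0.1, 0.2, 0.3]},
        {"id": "doc-2", "text_vector": [0.4, 0.5, 0.6]},
    ]
    resp = w.vector_search_indexes.upsert_data_vector_index(  # assumed accessor and method
        index_name="main.docs.docs_index",
        inputs_json=json.dumps(rows),
    )
    if resp.status is not UpsertDataStatus.SUCCESS and resp.result:
        print("failed keys:", resp.result.failed_primary_keys)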
 
 
 @dataclass
@@ -1248,34 +1459,48 @@ class Value:
     def as_dict(self) -> dict:
         """Serializes the Value into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bool_value is not None: body['bool_value'] = self.bool_value
-        if self.list_value: body['list_value'] = self.list_value.as_dict()
-        if self.null_value is not None: body['null_value'] = self.null_value
-        if self.number_value is not None: body['number_value'] = self.number_value
-        if self.string_value is not None: body['string_value'] = self.string_value
-        if self.struct_value: body['struct_value'] = self.struct_value.as_dict()
+        if self.bool_value is not None:
+            body["bool_value"] = self.bool_value
+        if self.list_value:
+            body["list_value"] = self.list_value.as_dict()
+        if self.null_value is not None:
+            body["null_value"] = self.null_value
+        if self.number_value is not None:
+            body["number_value"] = self.number_value
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
+        if self.struct_value:
+            body["struct_value"] = self.struct_value.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Value into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bool_value is not None: body['bool_value'] = self.bool_value
-        if self.list_value: body['list_value'] = self.list_value
-        if self.null_value is not None: body['null_value'] = self.null_value
-        if self.number_value is not None: body['number_value'] = self.number_value
-        if self.string_value is not None: body['string_value'] = self.string_value
-        if self.struct_value: body['struct_value'] = self.struct_value
+        if self.bool_value is not None:
+            body["bool_value"] = self.bool_value
+        if self.list_value:
+            body["list_value"] = self.list_value
+        if self.null_value is not None:
+            body["null_value"] = self.null_value
+        if self.number_value is not None:
+            body["number_value"] = self.number_value
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
+        if self.struct_value:
+            body["struct_value"] = self.struct_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Value:
         """Deserializes the Value from a dictionary."""
-        return cls(bool_value=d.get('bool_value', None),
-                   list_value=_from_dict(d, 'list_value', ListValue),
-                   null_value=d.get('null_value', None),
-                   number_value=d.get('number_value', None),
-                   string_value=d.get('string_value', None),
-                   struct_value=_from_dict(d, 'struct_value', Struct))
+        return cls(
+            bool_value=d.get("bool_value", None),
+            list_value=_from_dict(d, "list_value", ListValue),
+            null_value=d.get("null_value", None),
+            number_value=d.get("number_value", None),
+            string_value=d.get("string_value", None),
+            struct_value=_from_dict(d, "struct_value", Struct),
+        )
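`ListValue`, `MapStringValueEntry`, `Struct`, and `Value` together mirror the protobuf `Struct`/`Value` well-known types: a row is a `Struct` whose `fields` are key/value entries, and each `Value` carries exactly one of its typed members. A small sketch that builds one row by hand and serializes it (field names and values are illustrative):

    from databricks.sdk.service.vectorsearch import MapStringValueEntry, Struct, Value

    row = Struct(
        fields=[
            MapStringValueEntry(key="id", value=Value(string_value="doc-42")),
            MapStringValueEntry(key="score", value=Value(number_value=0.87)),
        ]
    )
    print(row.as_dict())
    # {'fields': [{'key': 'id', 'value': {'string_value': 'doc-42'}},
    #             {'key': 'score', 'value': {'number_value': 0.87}}]}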
 
 
 @dataclass
@@ -1309,43 +1534,58 @@ class VectorIndex:
     def as_dict(self) -> dict:
         """Serializes the VectorIndex into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creator is not None: body['creator'] = self.creator
-        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict()
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.delta_sync_index_spec:
+            body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict()
         if self.direct_access_index_spec:
-            body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict()
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
-        if self.status: body['status'] = self.status.as_dict()
+            body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict()
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
+        if self.status:
+            body["status"] = self.status.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the VectorIndex into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creator is not None: body['creator'] = self.creator
-        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
-        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
-        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.index_type is not None: body['index_type'] = self.index_type
-        if self.name is not None: body['name'] = self.name
-        if self.primary_key is not None: body['primary_key'] = self.primary_key
-        if self.status: body['status'] = self.status
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.delta_sync_index_spec:
+            body["delta_sync_index_spec"] = self.delta_sync_index_spec
+        if self.direct_access_index_spec:
+            body["direct_access_index_spec"] = self.direct_access_index_spec
+        if self.endpoint_name is not None:
+            body["endpoint_name"] = self.endpoint_name
+        if self.index_type is not None:
+            body["index_type"] = self.index_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.primary_key is not None:
+            body["primary_key"] = self.primary_key
+        if self.status:
+            body["status"] = self.status
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndex:
         """Deserializes the VectorIndex from a dictionary."""
-        return cls(creator=d.get('creator', None),
-                   delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec',
-                                                    DeltaSyncVectorIndexSpecResponse),
-                   direct_access_index_spec=_from_dict(d, 'direct_access_index_spec',
-                                                       DirectAccessVectorIndexSpec),
-                   endpoint_name=d.get('endpoint_name', None),
-                   index_type=_enum(d, 'index_type', VectorIndexType),
-                   name=d.get('name', None),
-                   primary_key=d.get('primary_key', None),
-                   status=_from_dict(d, 'status', VectorIndexStatus))
+        return cls(
+            creator=d.get("creator", None),
+            delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecResponse),
+            direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec),
+            endpoint_name=d.get("endpoint_name", None),
+            index_type=_enum(d, "index_type", VectorIndexType),
+            name=d.get("name", None),
+            primary_key=d.get("primary_key", None),
+            status=_from_dict(d, "status", VectorIndexStatus),
+        )
 
 
 @dataclass
@@ -1365,40 +1605,50 @@ class VectorIndexStatus:
     def as_dict(self) -> dict:
         """Serializes the VectorIndexStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.index_url is not None: body['index_url'] = self.index_url
-        if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count
-        if self.message is not None: body['message'] = self.message
-        if self.ready is not None: body['ready'] = self.ready
+        if self.index_url is not None:
+            body["index_url"] = self.index_url
+        if self.indexed_row_count is not None:
+            body["indexed_row_count"] = self.indexed_row_count
+        if self.message is not None:
+            body["message"] = self.message
+        if self.ready is not None:
+            body["ready"] = self.ready
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.index_url is not None: body['index_url'] = self.index_url
-        if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count
-        if self.message is not None: body['message'] = self.message
-        if self.ready is not None: body['ready'] = self.ready
+        if self.index_url is not None:
+            body["index_url"] = self.index_url
+        if self.indexed_row_count is not None:
+            body["indexed_row_count"] = self.indexed_row_count
+        if self.message is not None:
+            body["message"] = self.message
+        if self.ready is not None:
+            body["ready"] = self.ready
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndexStatus:
         """Deserializes the VectorIndexStatus from a dictionary."""
-        return cls(index_url=d.get('index_url', None),
-                   indexed_row_count=d.get('indexed_row_count', None),
-                   message=d.get('message', None),
-                   ready=d.get('ready', None))
+        return cls(
+            index_url=d.get("index_url", None),
+            indexed_row_count=d.get("indexed_row_count", None),
+            message=d.get("message", None),
+            ready=d.get("ready", None),
+        )
 
 
 class VectorIndexType(Enum):
     """There are 2 types of Vector Search indexes:
-    
+
     - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
     incrementally updating the index as the underlying data in the Delta Table changes. -
     `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
     our REST and SDK APIs. With this model, the user manages index updates."""
 
-    DELTA_SYNC = 'DELTA_SYNC'
-    DIRECT_ACCESS = 'DIRECT_ACCESS'
+    DELTA_SYNC = "DELTA_SYNC"
+    DIRECT_ACCESS = "DIRECT_ACCESS"
 
 
 class VectorSearchEndpointsAPI:
@@ -1408,19 +1658,20 @@ def __init__(self, api_client):
         self._api = api_client
 
     def wait_get_endpoint_vector_search_endpoint_online(
-            self,
-            endpoint_name: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[EndpointInfo], None]] = None) -> EndpointInfo:
+        self,
+        endpoint_name: str,
+        timeout=timedelta(minutes=20),
+        callback: Optional[Callable[[EndpointInfo], None]] = None,
+    ) -> EndpointInfo:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (EndpointStatusState.ONLINE, )
-        failure_states = (EndpointStatusState.OFFLINE, )
-        status_message = 'polling...'
+        target_states = (EndpointStatusState.ONLINE,)
+        failure_states = (EndpointStatusState.OFFLINE,)
+        status_message = "polling..."
         attempt = 1
         while time.time() < deadline:
             poll = self.get_endpoint(endpoint_name=endpoint_name)
             status = poll.endpoint_status.state
-            status_message = f'current status: {status}'
+            status_message = f"current status: {status}"
             if poll.endpoint_status:
                 status_message = poll.endpoint_status.message
             if status in target_states:
@@ -1428,122 +1679,157 @@ def wait_get_endpoint_vector_search_endpoint_online(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach ONLINE, got {status}: {status_message}'
+                msg = f"failed to reach ONLINE, got {status}: {status_message}"
                 raise OperationFailed(msg)
             prefix = f"endpoint_name={endpoint_name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
                 sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
             time.sleep(sleep + random.random())
             attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+        raise TimeoutError(f"timed out after {timeout}: {status_message}")
 
     def create_endpoint(self, name: str, endpoint_type: EndpointType) -> Wait[EndpointInfo]:
         """Create an endpoint.
-        
+
         Create a new endpoint.
-        
+
         :param name: str
           Name of endpoint
         :param endpoint_type: :class:`EndpointType`
           Type of endpoint.
-        
+
         :returns:
           Long-running operation waiter for :class:`EndpointInfo`.
           See :method:wait_get_endpoint_vector_search_endpoint_online for more details.
         """
         body = {}
-        if endpoint_type is not None: body['endpoint_type'] = endpoint_type.value
-        if name is not None: body['name'] = name
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST', '/api/2.0/vector-search/endpoints', body=body, headers=headers)
-        return Wait(self.wait_get_endpoint_vector_search_endpoint_online,
-                    response=EndpointInfo.from_dict(op_response),
-                    endpoint_name=op_response['name'])
-
-    def create_endpoint_and_wait(self, name: str, endpoint_type: EndpointType,
-                                 timeout=timedelta(minutes=20)) -> EndpointInfo:
+        if endpoint_type is not None:
+            body["endpoint_type"] = endpoint_type.value
+        if name is not None:
+            body["name"] = name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        op_response = self._api.do(
+            "POST",
+            "/api/2.0/vector-search/endpoints",
+            body=body,
+            headers=headers,
+        )
+        return Wait(
+            self.wait_get_endpoint_vector_search_endpoint_online,
+            response=EndpointInfo.from_dict(op_response),
+            endpoint_name=op_response["name"],
+        )
+
+    def create_endpoint_and_wait(
+        self,
+        name: str,
+        endpoint_type: EndpointType,
+        timeout=timedelta(minutes=20),
+    ) -> EndpointInfo:
         return self.create_endpoint(endpoint_type=endpoint_type, name=name).result(timeout=timeout)
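`create_endpoint` returns a `Wait[EndpointInfo]` wired to the waiter above, while `create_endpoint_and_wait` is the blocking convenience wrapper. A usage sketch, assuming a configured `WorkspaceClient` exposes this service as `w.vector_search_endpoints` (the accessor name is not shown in these hunks) and that the endpoint name is illustrative:

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.vectorsearch import EndpointType

    w = WorkspaceClient()

    # Non-blocking: the POST is issued and a Wait[EndpointInfo] comes back immediately.
    waiter = w.vector_search_endpoints.create_endpoint(
        name="my-endpoint",
        endpoint_type=EndpointType.STANDARD,
    )

    # Block until the endpoint reaches ONLINE; OFFLINE raises OperationFailed,
    # and exceeding the deadline raises TimeoutError.
    endpoint = waiter.result(timeout=timedelta(minutes=30))
    print(endpoint.endpoint_status.state)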
 
     def delete_endpoint(self, endpoint_name: str):
         """Delete an endpoint.
-        
+
         :param endpoint_name: str
           Name of the endpoint
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/vector-search/endpoints/{endpoint_name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/vector-search/endpoints/{endpoint_name}",
+            headers=headers,
+        )
 
     def get_endpoint(self, endpoint_name: str) -> EndpointInfo:
         """Get an endpoint.
-        
+
         :param endpoint_name: str
           Name of the endpoint
-        
+
         :returns: :class:`EndpointInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/vector-search/endpoints/{endpoint_name}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/vector-search/endpoints/{endpoint_name}",
+            headers=headers,
+        )
         return EndpointInfo.from_dict(res)
 
     def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[EndpointInfo]:
         """List all endpoints.
-        
+
         :param page_token: str (optional)
           Token for pagination
-        
+
         :returns: Iterator over :class:`EndpointInfo`
         """
 
         query = {}
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/vector-search/endpoints', query=query, headers=headers)
-            if 'endpoints' in json:
-                for v in json['endpoints']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/vector-search/endpoints",
+                query=query,
+                headers=headers,
+            )
+            if "endpoints" in json:
+                for v in json["endpoints"]:
                     yield EndpointInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
+            query["page_token"] = json["next_page_token"]
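`list_endpoints` (like `list_indexes` below) is a generator that follows `next_page_token` transparently, so callers simply iterate. A usage sketch with `w` a configured `WorkspaceClient` as in the earlier sketches:

    for ep in w.vector_search_endpoints.list_endpoints():
        state = ep.endpoint_status.state if ep.endpoint_status else None
        print(ep.name, state, ep.num_indexes)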
 
 
 class VectorSearchIndexesAPI:
     """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient
     approximate nearest neighbor (ANN) search queries.
-    
+
     There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that automatically syncs with
     a source Delta Table, automatically and incrementally updating the index as the underlying data in the
     Delta Table changes. * **Direct Vector Access Index**: An index that supports direct read and write of
-    vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates."""
+    vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
     def create_index(
-            self,
-            name: str,
-            endpoint_name: str,
-            primary_key: str,
-            index_type: VectorIndexType,
-            *,
-            delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None,
-            direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None
+        self,
+        name: str,
+        endpoint_name: str,
+        primary_key: str,
+        index_type: VectorIndexType,
+        *,
+        delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None,
+        direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None,
     ) -> CreateVectorIndexResponse:
         """Create an index.
-        
+
         Create a new index.
-        
+
         :param name: str
           Name of the index
         :param endpoint_name: str
@@ -1552,7 +1838,7 @@ def create_index(
           Primary key of the index
         :param index_type: :class:`VectorIndexType`
           There are 2 types of Vector Search indexes:
-          
+
           - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
           incrementally updating the index as the underlying data in the Delta Table changes. -
           `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our
@@ -1561,127 +1847,163 @@ def create_index(
           Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.
         :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)
           Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.
-        
+
         :returns: :class:`CreateVectorIndexResponse`
         """
         body = {}
-        if delta_sync_index_spec is not None: body['delta_sync_index_spec'] = delta_sync_index_spec.as_dict()
+        if delta_sync_index_spec is not None:
+            body["delta_sync_index_spec"] = delta_sync_index_spec.as_dict()
         if direct_access_index_spec is not None:
-            body['direct_access_index_spec'] = direct_access_index_spec.as_dict()
-        if endpoint_name is not None: body['endpoint_name'] = endpoint_name
-        if index_type is not None: body['index_type'] = index_type.value
-        if name is not None: body['name'] = name
-        if primary_key is not None: body['primary_key'] = primary_key
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/vector-search/indexes', body=body, headers=headers)
+            body["direct_access_index_spec"] = direct_access_index_spec.as_dict()
+        if endpoint_name is not None:
+            body["endpoint_name"] = endpoint_name
+        if index_type is not None:
+            body["index_type"] = index_type.value
+        if name is not None:
+            body["name"] = name
+        if primary_key is not None:
+            body["primary_key"] = primary_key
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            "/api/2.0/vector-search/indexes",
+            body=body,
+            headers=headers,
+        )
         return CreateVectorIndexResponse.from_dict(res)
 
-    def delete_data_vector_index(self, index_name: str,
-                                 primary_keys: List[str]) -> DeleteDataVectorIndexResponse:
+    def delete_data_vector_index(self, index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse:
         """Delete data from index.
-        
+
         Handles the deletion of data from a specified vector index.
-        
+
         :param index_name: str
           Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index.
         :param primary_keys: List[str]
           List of primary keys for the data to be deleted.
-        
+
         :returns: :class:`DeleteDataVectorIndexResponse`
         """
         body = {}
-        if primary_keys is not None: body['primary_keys'] = [v for v in primary_keys]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/vector-search/indexes/{index_name}/delete-data',
-                           body=body,
-                           headers=headers)
+        if primary_keys is not None:
+            body["primary_keys"] = [v for v in primary_keys]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/delete-data",
+            body=body,
+            headers=headers,
+        )
         return DeleteDataVectorIndexResponse.from_dict(res)
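A short usage sketch for the delete-data call above, with `w` a configured `WorkspaceClient` as in the earlier endpoint sketch and `w.vector_search_indexes` again an assumed accessor; the primary keys name rows in a Direct Vector Access index (all values illustrative):

    resp = w.vector_search_indexes.delete_data_vector_index(
        index_name="main.docs.docs_index",
        primary_keys=["doc-1", "doc-2"],
    )
    # resp is a DeleteDataVectorIndexResponse; its shape is defined elsewhere in this module.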
 
     def delete_index(self, index_name: str):
         """Delete an index.
-        
+
         Delete an index.
-        
+
         :param index_name: str
           Name of the index
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('DELETE', f'/api/2.0/vector-search/indexes/{index_name}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/vector-search/indexes/{index_name}",
+            headers=headers,
+        )
 
     def get_index(self, index_name: str) -> VectorIndex:
         """Get an index.
-        
+
         Get an index.
-        
+
         :param index_name: str
           Name of the index
-        
+
         :returns: :class:`VectorIndex`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/vector-search/indexes/{index_name}', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/vector-search/indexes/{index_name}",
+            headers=headers,
+        )
         return VectorIndex.from_dict(res)
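`get_index` and `delete_index` take only the index name; a minimal sketch, again assuming the `vector_search_indexes` attribute:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

index = w.vector_search_indexes.get_index(index_name="main.default.docs_index")
print(index)

# Deleting returns nothing; it simply issues the DELETE request shown above.
w.vector_search_indexes.delete_index(index_name="main.default.docs_index")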
 
-    def list_indexes(self,
-                     endpoint_name: str,
-                     *,
-                     page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]:
+    def list_indexes(self, endpoint_name: str, *, page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]:
         """List indexes.
-        
+
         List all indexes in the given endpoint.
-        
+
         :param endpoint_name: str
           Name of the endpoint
         :param page_token: str (optional)
           Token for pagination
-        
+
         :returns: Iterator over :class:`MiniVectorIndex`
         """
 
         query = {}
-        if endpoint_name is not None: query['endpoint_name'] = endpoint_name
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
+        if endpoint_name is not None:
+            query["endpoint_name"] = endpoint_name
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/vector-search/indexes', query=query, headers=headers)
-            if 'vector_indexes' in json:
-                for v in json['vector_indexes']:
+            json = self._api.do(
+                "GET",
+                "/api/2.0/vector-search/indexes",
+                query=query,
+                headers=headers,
+            )
+            if "vector_indexes" in json:
+                for v in json["vector_indexes"]:
                     yield MiniVectorIndex.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['page_token'] = json['next_page_token']
-
-    def query_index(self,
-                    index_name: str,
-                    columns: List[str],
-                    *,
-                    filters_json: Optional[str] = None,
-                    num_results: Optional[int] = None,
-                    query_text: Optional[str] = None,
-                    query_type: Optional[str] = None,
-                    query_vector: Optional[List[float]] = None,
-                    score_threshold: Optional[float] = None) -> QueryVectorIndexResponse:
+            query["page_token"] = json["next_page_token"]
+
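`list_indexes` is a generator that follows `next_page_token` internally, so callers never touch pagination themselves. A sketch (the `name` field on `MiniVectorIndex` is assumed):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The while-loop above re-issues the GET with the returned page token,
# so this for-loop walks every index on the endpoint.
for mini_index in w.vector_search_indexes.list_indexes(endpoint_name="my-vs-endpoint"):
    print(mini_index.name)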
+    def query_index(
+        self,
+        index_name: str,
+        columns: List[str],
+        *,
+        filters_json: Optional[str] = None,
+        num_results: Optional[int] = None,
+        query_text: Optional[str] = None,
+        query_type: Optional[str] = None,
+        query_vector: Optional[List[float]] = None,
+        score_threshold: Optional[float] = None,
+    ) -> QueryVectorIndexResponse:
         """Query an index.
-        
+
         Query the specified vector index.
-        
+
         :param index_name: str
           Name of the vector index to query.
         :param columns: List[str]
           List of column names to include in the response.
         :param filters_json: str (optional)
           JSON string representing query filters.
-          
+
           Example filters: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater
           than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id
           greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.
@@ -1696,118 +2018,158 @@ def query_index(self,
           vectors.
         :param score_threshold: float (optional)
           Threshold for the approximate nearest neighbor search. Defaults to 0.0.
-        
+
         :returns: :class:`QueryVectorIndexResponse`
         """
         body = {}
-        if columns is not None: body['columns'] = [v for v in columns]
-        if filters_json is not None: body['filters_json'] = filters_json
-        if num_results is not None: body['num_results'] = num_results
-        if query_text is not None: body['query_text'] = query_text
-        if query_type is not None: body['query_type'] = query_type
-        if query_vector is not None: body['query_vector'] = [v for v in query_vector]
-        if score_threshold is not None: body['score_threshold'] = score_threshold
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/vector-search/indexes/{index_name}/query',
-                           body=body,
-                           headers=headers)
+        if columns is not None:
+            body["columns"] = [v for v in columns]
+        if filters_json is not None:
+            body["filters_json"] = filters_json
+        if num_results is not None:
+            body["num_results"] = num_results
+        if query_text is not None:
+            body["query_text"] = query_text
+        if query_type is not None:
+            body["query_type"] = query_type
+        if query_vector is not None:
+            body["query_vector"] = [v for v in query_vector]
+        if score_threshold is not None:
+            body["score_threshold"] = score_threshold
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/query",
+            body=body,
+            headers=headers,
+        )
         return QueryVectorIndexResponse.from_dict(res)
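A hedged sketch of a `query_index` call combining a toy query vector with a `filters_json` filter; the client attribute name is assumed as before.

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Approximate nearest-neighbour query over two columns, filtered to id < 100.
# The 3-element vector is a toy; a real query vector must match the index dimension.
resp = w.vector_search_indexes.query_index(
    index_name="main.default.docs_index",
    columns=["id", "text"],
    query_vector=[0.12, 0.34, 0.56],
    filters_json=json.dumps({"id <": 100}),
    num_results=5,
)
print(resp)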
 
-    def query_next_page(self,
-                        index_name: str,
-                        *,
-                        endpoint_name: Optional[str] = None,
-                        page_token: Optional[str] = None) -> QueryVectorIndexResponse:
+    def query_next_page(
+        self,
+        index_name: str,
+        *,
+        endpoint_name: Optional[str] = None,
+        page_token: Optional[str] = None,
+    ) -> QueryVectorIndexResponse:
         """Query next page.
-        
+
         Use the `next_page_token` returned from a previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
         to fetch the next page of results.
-        
+
         :param index_name: str
           Name of the vector index to query.
         :param endpoint_name: str (optional)
           Name of the endpoint.
         :param page_token: str (optional)
           Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
-        
+
         :returns: :class:`QueryVectorIndexResponse`
         """
         body = {}
-        if endpoint_name is not None: body['endpoint_name'] = endpoint_name
-        if page_token is not None: body['page_token'] = page_token
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/vector-search/indexes/{index_name}/query-next-page',
-                           body=body,
-                           headers=headers)
+        if endpoint_name is not None:
+            body["endpoint_name"] = endpoint_name
+        if page_token is not None:
+            body["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/query-next-page",
+            body=body,
+            headers=headers,
+        )
         return QueryVectorIndexResponse.from_dict(res)
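A pagination sketch pairing `query_index` with `query_next_page`; the `next_page_token` attribute on the response object is assumed from the docstring above.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

page = w.vector_search_indexes.query_index(
    index_name="main.default.docs_index",
    columns=["id", "text"],
    query_text="release notes",
    num_results=10,
)

# Keep fetching while the service reports another page.
while getattr(page, "next_page_token", None):
    page = w.vector_search_indexes.query_next_page(
        index_name="main.default.docs_index",
        page_token=page.next_page_token,
    )
    print(page)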
 
-    def scan_index(self,
-                   index_name: str,
-                   *,
-                   last_primary_key: Optional[str] = None,
-                   num_results: Optional[int] = None) -> ScanVectorIndexResponse:
+    def scan_index(
+        self,
+        index_name: str,
+        *,
+        last_primary_key: Optional[str] = None,
+        num_results: Optional[int] = None,
+    ) -> ScanVectorIndexResponse:
         """Scan an index.
-        
+
         Scan the specified vector index and return the first `num_results` entries after the exclusive
         `last_primary_key`.
-        
+
         :param index_name: str
           Name of the vector index to scan.
         :param last_primary_key: str (optional)
           Primary key of the last entry returned in the previous scan.
         :param num_results: int (optional)
           Number of results to return. Defaults to 10.
-        
+
         :returns: :class:`ScanVectorIndexResponse`
         """
         body = {}
-        if last_primary_key is not None: body['last_primary_key'] = last_primary_key
-        if num_results is not None: body['num_results'] = num_results
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/vector-search/indexes/{index_name}/scan',
-                           body=body,
-                           headers=headers)
+        if last_primary_key is not None:
+            body["last_primary_key"] = last_primary_key
+        if num_results is not None:
+            body["num_results"] = num_results
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/scan",
+            body=body,
+            headers=headers,
+        )
         return ScanVectorIndexResponse.from_dict(res)
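A sketch of paging through an index with `scan_index`, using the exclusive `last_primary_key` cursor described above (placeholder values throughout).

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

first_batch = w.vector_search_indexes.scan_index(
    index_name="main.default.docs_index",
    num_results=100,
)

# Continue from the last primary key returned by the previous scan;
# "100" is a placeholder for that key.
next_batch = w.vector_search_indexes.scan_index(
    index_name="main.default.docs_index",
    last_primary_key="100",
    num_results=100,
)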
 
     def sync_index(self, index_name: str):
         """Synchronize an index.
-        
+
         Triggers a synchronization process for a specified vector index.
-        
+
         :param index_name: str
           Name of the vector index to synchronize. Must be a Delta Sync Index.
-        
-        
+
+
         """
 
         headers = {}
 
-        self._api.do('POST', f'/api/2.0/vector-search/indexes/{index_name}/sync', headers=headers)
+        self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/sync",
+            headers=headers,
+        )
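`sync_index` is fire-and-forget from the client's point of view; a one-line sketch, with the client attribute assumed as before:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Returns nothing; the synchronization itself is triggered on the service side.
w.vector_search_indexes.sync_index(index_name="main.default.docs_sync_index")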
 
     def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse:
         """Upsert data into an index.
-        
+
         Handles the upserting of data into a specified vector index.
-        
+
         :param index_name: str
           Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
         :param inputs_json: str
           JSON string representing the data to be upserted.
-        
+
         :returns: :class:`UpsertDataVectorIndexResponse`
         """
         body = {}
-        if inputs_json is not None: body['inputs_json'] = inputs_json
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST',
-                           f'/api/2.0/vector-search/indexes/{index_name}/upsert-data',
-                           body=body,
-                           headers=headers)
+        if inputs_json is not None:
+            body["inputs_json"] = inputs_json
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST",
+            f"/api/2.0/vector-search/indexes/{index_name}/upsert-data",
+            body=body,
+            headers=headers,
+        )
         return UpsertDataVectorIndexResponse.from_dict(res)
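Because `inputs_json` is a JSON string rather than a list of rows, callers serialize with `json.dumps` first; a sketch with placeholder rows:

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

rows = [
    {"id": 1, "text": "hello", "embedding": [0.1, 0.2, 0.3]},
    {"id": 2, "text": "world", "embedding": [0.4, 0.5, 0.6]},
]
resp = w.vector_search_indexes.upsert_data_vector_index(
    index_name="main.default.docs_index",
    inputs_json=json.dumps(rows),
)
print(resp)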
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index eb5418987..92cb563f2 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -9,7 +9,7 @@
 
 from ._internal import _enum, _from_dict, _repeated_dict
 
-_LOG = logging.getLogger('databricks.sdk')
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
@@ -25,28 +25,35 @@ class AclItem:
     def as_dict(self) -> dict:
         """Serializes the AclItem into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission is not None: body['permission'] = self.permission.value
-        if self.principal is not None: body['principal'] = self.principal
+        if self.permission is not None:
+            body["permission"] = self.permission.value
+        if self.principal is not None:
+            body["principal"] = self.principal
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AclItem into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission is not None: body['permission'] = self.permission
-        if self.principal is not None: body['principal'] = self.principal
+        if self.permission is not None:
+            body["permission"] = self.permission
+        if self.principal is not None:
+            body["principal"] = self.principal
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AclItem:
         """Deserializes the AclItem from a dictionary."""
-        return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None))
+        return cls(
+            permission=_enum(d, "permission", AclPermission),
+            principal=d.get("principal", None),
+        )
 
 
 class AclPermission(Enum):
 
-    MANAGE = 'MANAGE'
-    READ = 'READ'
-    WRITE = 'WRITE'
+    MANAGE = "MANAGE"
+    READ = "READ"
+    WRITE = "WRITE"
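The same `as_dict`/`from_dict` pattern repeats for every dataclass in this file; a short round trip for `AclItem` shows how the two relate (assuming the class is imported from `databricks.sdk.service.workspace`):

from databricks.sdk.service.workspace import AclItem, AclPermission

item = AclItem(principal="data-eng", permission=AclPermission.MANAGE)

# as_dict() skips None fields and stores the enum's string value ...
payload = item.as_dict()  # {'permission': 'MANAGE', 'principal': 'data-eng'}

# ... and from_dict() rebuilds the enum via the _enum helper.
assert AclItem.from_dict(payload) == item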
 
 
 @dataclass
@@ -60,21 +67,28 @@ class AzureKeyVaultSecretScopeMetadata:
     def as_dict(self) -> dict:
         """Serializes the AzureKeyVaultSecretScopeMetadata into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dns_name is not None: body['dns_name'] = self.dns_name
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.dns_name is not None:
+            body["dns_name"] = self.dns_name
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dns_name is not None: body['dns_name'] = self.dns_name
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.dns_name is not None:
+            body["dns_name"] = self.dns_name
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata:
         """Deserializes the AzureKeyVaultSecretScopeMetadata from a dictionary."""
-        return cls(dns_name=d.get('dns_name', None), resource_id=d.get('resource_id', None))
+        return cls(
+            dns_name=d.get("dns_name", None),
+            resource_id=d.get("resource_id", None),
+        )
 
 
 @dataclass
@@ -100,25 +114,33 @@ class CreateCredentialsRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest:
         """Deserializes the CreateCredentialsRequest from a dictionary."""
-        return cls(git_provider=d.get('git_provider', None),
-                   git_username=d.get('git_username', None),
-                   personal_access_token=d.get('personal_access_token', None))
+        return cls(
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+            personal_access_token=d.get("personal_access_token", None),
+        )
 
 
 @dataclass
@@ -136,25 +158,33 @@ class CreateCredentialsResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse:
         """Deserializes the CreateCredentialsResponse from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None),
-                   git_provider=d.get('git_provider', None),
-                   git_username=d.get('git_username', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+        )
 
 
 @dataclass
@@ -178,28 +208,38 @@ class CreateRepoRequest:
     def as_dict(self) -> dict:
         """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.url is not None: body['url'] = self.url
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest:
         """Deserializes the CreateRepoRequest from a dictionary."""
-        return cls(path=d.get('path', None),
-                   provider=d.get('provider', None),
-                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
-                   url=d.get('url', None))
+        return cls(
+            path=d.get("path", None),
+            provider=d.get("provider", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout),
+            url=d.get("url", None),
+        )
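A construction sketch for `CreateRepoRequest`; the `patterns` field on `SparseCheckout` and the `"gitHub"` provider string are assumptions, not taken from this hunk.

from databricks.sdk.service.workspace import CreateRepoRequest, SparseCheckout

req = CreateRepoRequest(
    url="https://github.com/databricks/databricks-sdk-py.git",
    provider="gitHub",  # assumed provider spelling
    path="/Repos/someone@example.com/databricks-sdk-py",  # hypothetical workspace path
    sparse_checkout=SparseCheckout(patterns=["databricks/sdk"]),  # `patterns` is assumed
)
print(req.as_dict())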
 
 
 @dataclass
@@ -228,37 +268,53 @@ class CreateRepoResponse:
     def as_dict(self) -> dict:
         """Serializes the CreateRepoResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse:
         """Deserializes the CreateRepoResponse from a dictionary."""
-        return cls(branch=d.get('branch', None),
-                   head_commit_id=d.get('head_commit_id', None),
-                   id=d.get('id', None),
-                   path=d.get('path', None),
-                   provider=d.get('provider', None),
-                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
-                   url=d.get('url', None))
+        return cls(
+            branch=d.get("branch", None),
+            head_commit_id=d.get("head_commit_id", None),
+            id=d.get("id", None),
+            path=d.get("path", None),
+            provider=d.get("provider", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -278,31 +334,38 @@ class CreateScope:
     def as_dict(self) -> dict:
         """Serializes the CreateScope into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault.as_dict()
+        if self.backend_azure_keyvault:
+            body["backend_azure_keyvault"] = self.backend_azure_keyvault.as_dict()
         if self.initial_manage_principal is not None:
-            body['initial_manage_principal'] = self.initial_manage_principal
-        if self.scope is not None: body['scope'] = self.scope
-        if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value
+            body["initial_manage_principal"] = self.initial_manage_principal
+        if self.scope is not None:
+            body["scope"] = self.scope
+        if self.scope_backend_type is not None:
+            body["scope_backend_type"] = self.scope_backend_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateScope into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault
+        if self.backend_azure_keyvault:
+            body["backend_azure_keyvault"] = self.backend_azure_keyvault
         if self.initial_manage_principal is not None:
-            body['initial_manage_principal'] = self.initial_manage_principal
-        if self.scope is not None: body['scope'] = self.scope
-        if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type
+            body["initial_manage_principal"] = self.initial_manage_principal
+        if self.scope is not None:
+            body["scope"] = self.scope
+        if self.scope_backend_type is not None:
+            body["scope_backend_type"] = self.scope_backend_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateScope:
         """Deserializes the CreateScope from a dictionary."""
-        return cls(backend_azure_keyvault=_from_dict(d, 'backend_azure_keyvault',
-                                                     AzureKeyVaultSecretScopeMetadata),
-                   initial_manage_principal=d.get('initial_manage_principal', None),
-                   scope=d.get('scope', None),
-                   scope_backend_type=_enum(d, 'scope_backend_type', ScopeBackendType))
+        return cls(
+            backend_azure_keyvault=_from_dict(d, "backend_azure_keyvault", AzureKeyVaultSecretScopeMetadata),
+            initial_manage_principal=d.get("initial_manage_principal", None),
+            scope=d.get("scope", None),
+            scope_backend_type=_enum(d, "scope_backend_type", ScopeBackendType),
+        )
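A sketch of building an Azure Key Vault backed `CreateScope`; the `AZURE_KEYVAULT` member of `ScopeBackendType` and the resource-id shape are assumptions.

from databricks.sdk.service.workspace import (AzureKeyVaultSecretScopeMetadata,
                                              CreateScope, ScopeBackendType)

scope = CreateScope(
    scope="kv-backed-scope",
    scope_backend_type=ScopeBackendType.AZURE_KEYVAULT,  # enum member assumed
    backend_azure_keyvault=AzureKeyVaultSecretScopeMetadata(
        dns_name="https://my-vault.vault.azure.net/",
        resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/my-vault",
    ),
)
print(scope.as_dict())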
 
 
 @dataclass
@@ -339,25 +402,33 @@ class CredentialInfo:
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None),
-                   git_provider=d.get('git_provider', None),
-                   git_username=d.get('git_username', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+        )
 
 
 @dataclass
@@ -373,21 +444,25 @@ class Delete:
     def as_dict(self) -> dict:
         """Serializes the Delete into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.recursive is not None: body['recursive'] = self.recursive
+        if self.path is not None:
+            body["path"] = self.path
+        if self.recursive is not None:
+            body["recursive"] = self.recursive
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Delete into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
-        if self.recursive is not None: body['recursive'] = self.recursive
+        if self.path is not None:
+            body["path"] = self.path
+        if self.recursive is not None:
+            body["recursive"] = self.recursive
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
-        return cls(path=d.get('path', None), recursive=d.get('recursive', None))
+        return cls(path=d.get("path", None), recursive=d.get("recursive", None))
 
 
 @dataclass
@@ -401,21 +476,25 @@ class DeleteAcl:
     def as_dict(self) -> dict:
         """Serializes the DeleteAcl into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.scope is not None: body['scope'] = self.scope
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principal is not None: body['principal'] = self.principal
-        if self.scope is not None: body['scope'] = self.scope
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAcl:
         """Deserializes the DeleteAcl from a dictionary."""
-        return cls(principal=d.get('principal', None), scope=d.get('scope', None))
+        return cls(principal=d.get("principal", None), scope=d.get("scope", None))
 
 
 @dataclass
@@ -502,19 +581,21 @@ class DeleteScope:
     def as_dict(self) -> dict:
         """Serializes the DeleteScope into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scope is not None: body['scope'] = self.scope
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteScope into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scope is not None: body['scope'] = self.scope
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScope:
         """Deserializes the DeleteScope from a dictionary."""
-        return cls(scope=d.get('scope', None))
+        return cls(scope=d.get("scope", None))
 
 
 @dataclass
@@ -547,21 +628,25 @@ class DeleteSecret:
     def as_dict(self) -> dict:
         """Serializes the DeleteSecret into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.scope is not None: body['scope'] = self.scope
+        if self.key is not None:
+            body["key"] = self.key
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.scope is not None: body['scope'] = self.scope
+        if self.key is not None:
+            body["key"] = self.key
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSecret:
         """Deserializes the DeleteSecret from a dictionary."""
-        return cls(key=d.get('key', None), scope=d.get('scope', None))
+        return cls(key=d.get("key", None), scope=d.get("scope", None))
 
 
 @dataclass
@@ -585,12 +670,12 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteSecretResponse:
 
 class ExportFormat(Enum):
 
-    AUTO = 'AUTO'
-    DBC = 'DBC'
-    HTML = 'HTML'
-    JUPYTER = 'JUPYTER'
-    R_MARKDOWN = 'R_MARKDOWN'
-    SOURCE = 'SOURCE'
+    AUTO = "AUTO"
+    DBC = "DBC"
+    HTML = "HTML"
+    JUPYTER = "JUPYTER"
+    R_MARKDOWN = "R_MARKDOWN"
+    SOURCE = "SOURCE"
 
 
 @dataclass
@@ -605,21 +690,25 @@ class ExportResponse:
     def as_dict(self) -> dict:
         """Serializes the ExportResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.file_type is not None: body['file_type'] = self.file_type
+        if self.content is not None:
+            body["content"] = self.content
+        if self.file_type is not None:
+            body["file_type"] = self.file_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ExportResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.file_type is not None: body['file_type'] = self.file_type
+        if self.content is not None:
+            body["content"] = self.content
+        if self.file_type is not None:
+            body["file_type"] = self.file_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportResponse:
         """Deserializes the ExportResponse from a dictionary."""
-        return cls(content=d.get('content', None), file_type=d.get('file_type', None))
+        return cls(content=d.get("content", None), file_type=d.get("file_type", None))
 
 
 @dataclass
@@ -637,25 +726,33 @@ class GetCredentialsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse:
         """Deserializes the GetCredentialsResponse from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None),
-                   git_provider=d.get('git_provider', None),
-                   git_username=d.get('git_username', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+        )
 
 
 @dataclass
@@ -666,19 +763,21 @@ class GetRepoPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetRepoPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse:
         """Deserializes the GetRepoPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', RepoPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", RepoPermissionsDescription))
 
 
 @dataclass
@@ -707,37 +806,53 @@ class GetRepoResponse:
     def as_dict(self) -> dict:
         """Serializes the GetRepoResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse:
         """Deserializes the GetRepoResponse from a dictionary."""
-        return cls(branch=d.get('branch', None),
-                   head_commit_id=d.get('head_commit_id', None),
-                   id=d.get('id', None),
-                   path=d.get('path', None),
-                   provider=d.get('provider', None),
-                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
-                   url=d.get('url', None))
+        return cls(
+            branch=d.get("branch", None),
+            head_commit_id=d.get("head_commit_id", None),
+            id=d.get("id", None),
+            path=d.get("path", None),
+            provider=d.get("provider", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -751,21 +866,25 @@ class GetSecretResponse:
     def as_dict(self) -> dict:
         """Serializes the GetSecretResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.value is not None: body['value'] = self.value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSecretResponse:
         """Deserializes the GetSecretResponse from a dictionary."""
-        return cls(key=d.get('key', None), value=d.get('value', None))
+        return cls(key=d.get("key", None), value=d.get("value", None))
 
 
 @dataclass
@@ -776,20 +895,21 @@ class GetWorkspaceObjectPermissionLevelsResponse:
     def as_dict(self) -> dict:
         """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceObjectPermissionLevelsResponse:
         """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary."""
-        return cls(
-            permission_levels=_repeated_dict(d, 'permission_levels', WorkspaceObjectPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", WorkspaceObjectPermissionsDescription))
 
 
 @dataclass
@@ -827,38 +947,50 @@ class Import:
     def as_dict(self) -> dict:
         """Serializes the Import into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.format is not None: body['format'] = self.format.value
-        if self.language is not None: body['language'] = self.language.value
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.content is not None:
+            body["content"] = self.content
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.language is not None:
+            body["language"] = self.language.value
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Import into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.format is not None: body['format'] = self.format
-        if self.language is not None: body['language'] = self.language
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.content is not None:
+            body["content"] = self.content
+        if self.format is not None:
+            body["format"] = self.format
+        if self.language is not None:
+            body["language"] = self.language
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Import:
         """Deserializes the Import from a dictionary."""
-        return cls(content=d.get('content', None),
-                   format=_enum(d, 'format', ImportFormat),
-                   language=_enum(d, 'language', Language),
-                   overwrite=d.get('overwrite', None),
-                   path=d.get('path', None))
+        return cls(
+            content=d.get("content", None),
+            format=_enum(d, "format", ImportFormat),
+            language=_enum(d, "language", Language),
+            overwrite=d.get("overwrite", None),
+            path=d.get("path", None),
+        )
 
 
 class ImportFormat(Enum):
     """This specifies the format of the file to be imported.
-    
+
     The value is case sensitive.
-    
+
     - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
     content provided in the request. If the item is imported as a notebook, then the item's
     extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source
@@ -867,13 +999,13 @@ class ImportFormat(Enum):
     format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown
     format."""
 
-    AUTO = 'AUTO'
-    DBC = 'DBC'
-    HTML = 'HTML'
-    JUPYTER = 'JUPYTER'
-    RAW = 'RAW'
-    R_MARKDOWN = 'R_MARKDOWN'
-    SOURCE = 'SOURCE'
+    AUTO = "AUTO"
+    DBC = "DBC"
+    HTML = "HTML"
+    JUPYTER = "JUPYTER"
+    RAW = "RAW"
+    R_MARKDOWN = "R_MARKDOWN"
+    SOURCE = "SOURCE"
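A sketch of an `Import` payload for a source-format Python notebook; the workspace import API expects base64-encoded content, and the path here is hypothetical.

import base64

from databricks.sdk.service.workspace import Import, ImportFormat, Language

source = "print('hello from an imported notebook')\n"

req = Import(
    path="/Users/someone@example.com/hello",
    format=ImportFormat.SOURCE,
    language=Language.PYTHON,
    overwrite=True,
    content=base64.b64encode(source.encode("utf-8")).decode("ascii"),
)
print(req.as_dict())  # enum fields serialize via .value, e.g. "SOURCE" and "PYTHON"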
 
 
 @dataclass
@@ -898,10 +1030,10 @@ def from_dict(cls, d: Dict[str, any]) -> ImportResponse:
 class Language(Enum):
     """The language of the object. This value is set only if the object type is `NOTEBOOK`."""
 
-    PYTHON = 'PYTHON'
-    R = 'R'
-    SCALA = 'SCALA'
-    SQL = 'SQL'
+    PYTHON = "PYTHON"
+    R = "R"
+    SCALA = "SCALA"
+    SQL = "SQL"
 
 
 @dataclass
@@ -912,19 +1044,21 @@ class ListAclsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListAclsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items: body['items'] = [v.as_dict() for v in self.items]
+        if self.items:
+            body["items"] = [v.as_dict() for v in self.items]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items: body['items'] = self.items
+        if self.items:
+            body["items"] = self.items
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse:
         """Deserializes the ListAclsResponse from a dictionary."""
-        return cls(items=_repeated_dict(d, 'items', AclItem))
+        return cls(items=_repeated_dict(d, "items", AclItem))
 
 
 @dataclass
@@ -935,19 +1069,21 @@ class ListCredentialsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.credentials:
+            body["credentials"] = [v.as_dict() for v in self.credentials]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credentials: body['credentials'] = self.credentials
+        if self.credentials:
+            body["credentials"] = self.credentials
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
+        return cls(credentials=_repeated_dict(d, "credentials", CredentialInfo))
 
 
 @dataclass
@@ -962,21 +1098,28 @@ class ListReposResponse:
     def as_dict(self) -> dict:
         """Serializes the ListReposResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.repos: body['repos'] = [v.as_dict() for v in self.repos]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.repos:
+            body["repos"] = [v.as_dict() for v in self.repos]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListReposResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.repos: body['repos'] = self.repos
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.repos:
+            body["repos"] = self.repos
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListReposResponse:
         """Deserializes the ListReposResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), repos=_repeated_dict(d, 'repos', RepoInfo))
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            repos=_repeated_dict(d, "repos", RepoInfo),
+        )
 
 
 @dataclass
@@ -987,19 +1130,21 @@ class ListResponse:
     def as_dict(self) -> dict:
         """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
+        if self.objects:
+            body["objects"] = [v.as_dict() for v in self.objects]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.objects: body['objects'] = self.objects
+        if self.objects:
+            body["objects"] = self.objects
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
-        return cls(objects=_repeated_dict(d, 'objects', ObjectInfo))
+        return cls(objects=_repeated_dict(d, "objects", ObjectInfo))
 
 
 @dataclass
@@ -1010,19 +1155,21 @@ class ListScopesResponse:
     def as_dict(self) -> dict:
         """Serializes the ListScopesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes]
+        if self.scopes:
+            body["scopes"] = [v.as_dict() for v in self.scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scopes: body['scopes'] = self.scopes
+        if self.scopes:
+            body["scopes"] = self.scopes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListScopesResponse:
         """Deserializes the ListScopesResponse from a dictionary."""
-        return cls(scopes=_repeated_dict(d, 'scopes', SecretScope))
+        return cls(scopes=_repeated_dict(d, "scopes", SecretScope))
 
 
 @dataclass
@@ -1033,19 +1180,21 @@ class ListSecretsResponse:
     def as_dict(self) -> dict:
         """Serializes the ListSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
+        if self.secrets:
+            body["secrets"] = [v.as_dict() for v in self.secrets]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.secrets: body['secrets'] = self.secrets
+        if self.secrets:
+            body["secrets"] = self.secrets
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSecretsResponse:
         """Deserializes the ListSecretsResponse from a dictionary."""
-        return cls(secrets=_repeated_dict(d, 'secrets', SecretMetadata))
+        return cls(secrets=_repeated_dict(d, "secrets", SecretMetadata))
 
 
 @dataclass
@@ -1057,19 +1206,21 @@ class Mkdirs:
     def as_dict(self) -> dict:
         """Serializes the Mkdirs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Mkdirs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None: body['path'] = self.path
+        if self.path is not None:
+            body["path"] = self.path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Mkdirs:
         """Deserializes the Mkdirs from a dictionary."""
-        return cls(path=d.get('path', None))
+        return cls(path=d.get("path", None))
 
 
 @dataclass
@@ -1124,55 +1275,73 @@ class ObjectInfo:
     def as_dict(self) -> dict:
         """Serializes the ObjectInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.language is not None: body['language'] = self.language.value
-        if self.modified_at is not None: body['modified_at'] = self.modified_at
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type.value
-        if self.path is not None: body['path'] = self.path
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
-        if self.size is not None: body['size'] = self.size
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.language is not None:
+            body["language"] = self.language.value
+        if self.modified_at is not None:
+            body["modified_at"] = self.modified_at
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type.value
+        if self.path is not None:
+            body["path"] = self.path
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
+        if self.size is not None:
+            body["size"] = self.size
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.language is not None: body['language'] = self.language
-        if self.modified_at is not None: body['modified_at'] = self.modified_at
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
-        if self.path is not None: body['path'] = self.path
-        if self.resource_id is not None: body['resource_id'] = self.resource_id
-        if self.size is not None: body['size'] = self.size
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.language is not None:
+            body["language"] = self.language
+        if self.modified_at is not None:
+            body["modified_at"] = self.modified_at
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
+        if self.path is not None:
+            body["path"] = self.path
+        if self.resource_id is not None:
+            body["resource_id"] = self.resource_id
+        if self.size is not None:
+            body["size"] = self.size
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectInfo:
         """Deserializes the ObjectInfo from a dictionary."""
-        return cls(created_at=d.get('created_at', None),
-                   language=_enum(d, 'language', Language),
-                   modified_at=d.get('modified_at', None),
-                   object_id=d.get('object_id', None),
-                   object_type=_enum(d, 'object_type', ObjectType),
-                   path=d.get('path', None),
-                   resource_id=d.get('resource_id', None),
-                   size=d.get('size', None))
+        return cls(
+            created_at=d.get("created_at", None),
+            language=_enum(d, "language", Language),
+            modified_at=d.get("modified_at", None),
+            object_id=d.get("object_id", None),
+            object_type=_enum(d, "object_type", ObjectType),
+            path=d.get("path", None),
+            resource_id=d.get("resource_id", None),
+            size=d.get("size", None),
+        )
 
 
 class ObjectType(Enum):
     """The type of the object in workspace.
-    
+
     - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. -
     `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`:
     Lakeview dashboard"""
 
-    DASHBOARD = 'DASHBOARD'
-    DIRECTORY = 'DIRECTORY'
-    FILE = 'FILE'
-    LIBRARY = 'LIBRARY'
-    NOTEBOOK = 'NOTEBOOK'
-    REPO = 'REPO'
+    DASHBOARD = "DASHBOARD"
+    DIRECTORY = "DIRECTORY"
+    FILE = "FILE"
+    LIBRARY = "LIBRARY"
+    NOTEBOOK = "NOTEBOOK"
+    REPO = "REPO"
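# A minimal, illustrative round-trip sketch of the as_dict()/from_dict() pattern shown
# above, assuming ObjectInfo's fields all default to None as elsewhere in this
# generated module; the path value is made up.
from databricks.sdk.service.workspace import ObjectInfo, ObjectType

info = ObjectInfo(path="/Users/someone@example.com/my-notebook", object_type=ObjectType.NOTEBOOK)
body = info.as_dict()  # enum fields are serialized via .value, e.g. "NOTEBOOK"
assert body == {"path": "/Users/someone@example.com/my-notebook", "object_type": "NOTEBOOK"}
restored = ObjectInfo.from_dict(body)  # _enum() maps the string back to ObjectType.NOTEBOOK
assert restored == info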
 
 
 @dataclass
@@ -1189,25 +1358,33 @@ class PutAcl:
     def as_dict(self) -> dict:
         """Serializes the PutAcl into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission is not None: body['permission'] = self.permission.value
-        if self.principal is not None: body['principal'] = self.principal
-        if self.scope is not None: body['scope'] = self.scope
+        if self.permission is not None:
+            body["permission"] = self.permission.value
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutAcl into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission is not None: body['permission'] = self.permission
-        if self.principal is not None: body['principal'] = self.principal
-        if self.scope is not None: body['scope'] = self.scope
+        if self.permission is not None:
+            body["permission"] = self.permission
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.scope is not None:
+            body["scope"] = self.scope
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAcl:
         """Deserializes the PutAcl from a dictionary."""
-        return cls(permission=_enum(d, 'permission', AclPermission),
-                   principal=d.get('principal', None),
-                   scope=d.get('scope', None))
+        return cls(
+            permission=_enum(d, "permission", AclPermission),
+            principal=d.get("principal", None),
+            scope=d.get("scope", None),
+        )
 
 
 @dataclass
@@ -1246,28 +1423,38 @@ class PutSecret:
     def as_dict(self) -> dict:
         """Serializes the PutSecret into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bytes_value is not None: body['bytes_value'] = self.bytes_value
-        if self.key is not None: body['key'] = self.key
-        if self.scope is not None: body['scope'] = self.scope
-        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.bytes_value is not None:
+            body["bytes_value"] = self.bytes_value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.scope is not None:
+            body["scope"] = self.scope
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PutSecret into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bytes_value is not None: body['bytes_value'] = self.bytes_value
-        if self.key is not None: body['key'] = self.key
-        if self.scope is not None: body['scope'] = self.scope
-        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.bytes_value is not None:
+            body["bytes_value"] = self.bytes_value
+        if self.key is not None:
+            body["key"] = self.key
+        if self.scope is not None:
+            body["scope"] = self.scope
+        if self.string_value is not None:
+            body["string_value"] = self.string_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutSecret:
         """Deserializes the PutSecret from a dictionary."""
-        return cls(bytes_value=d.get('bytes_value', None),
-                   key=d.get('key', None),
-                   scope=d.get('scope', None),
-                   string_value=d.get('string_value', None))
+        return cls(
+            bytes_value=d.get("bytes_value", None),
+            key=d.get("key", None),
+            scope=d.get("scope", None),
+            string_value=d.get("string_value", None),
+        )
 
 
 @dataclass
@@ -1306,30 +1493,38 @@ class RepoAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the RepoAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlRequest:
         """Deserializes the RepoAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', RepoPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", RepoPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1352,33 +1547,43 @@ class RepoAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the RepoAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse:
         """Deserializes the RepoAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', RepoPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", RepoPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1409,37 +1614,53 @@ class RepoInfo:
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.url is not None:
+            body["url"] = self.url
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
-        return cls(branch=d.get('branch', None),
-                   head_commit_id=d.get('head_commit_id', None),
-                   id=d.get('id', None),
-                   path=d.get('path', None),
-                   provider=d.get('provider', None),
-                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
-                   url=d.get('url', None))
+        return cls(
+            branch=d.get("branch", None),
+            head_commit_id=d.get("head_commit_id", None),
+            id=d.get("id", None),
+            path=d.get("path", None),
+            provider=d.get("provider", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout),
+            url=d.get("url", None),
+        )
 
 
 @dataclass
@@ -1454,34 +1675,42 @@ class RepoPermission:
     def as_dict(self) -> dict:
         """Serializes the RepoPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermission:
         """Deserializes the RepoPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', RepoPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", RepoPermissionLevel),
+        )
 
 
 class RepoPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_READ = 'CAN_READ'
-    CAN_RUN = 'CAN_RUN'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_READ = "CAN_READ"
+    CAN_RUN = "CAN_RUN"
 
 
 @dataclass
@@ -1496,25 +1725,32 @@ def as_dict(self) -> dict:
         """Serializes the RepoPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissions:
         """Deserializes the RepoPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
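# A small, illustrative sketch of how the _repeated_dict()/_enum() helpers used above
# rebuild nested structures from a plain API payload; the values below are made up and
# only fields visible in this module are used.
from databricks.sdk.service.workspace import RepoPermissionLevel, RepoPermissions

payload = {
    "object_id": "/repos/123",
    "object_type": "repo",
    "access_control_list": [
        {
            "user_name": "someone@example.com",
            "all_permissions": [{"permission_level": "CAN_MANAGE", "inherited": False}],
        }
    ],
}
perms = RepoPermissions.from_dict(payload)
acl = perms.access_control_list[0]
assert acl.all_permissions[0].permission_level == RepoPermissionLevel.CAN_MANAGE
assert perms.as_dict() == payload  # serializing again reverses the mapping above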
 
 
 @dataclass
@@ -1527,22 +1763,28 @@ class RepoPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the RepoPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsDescription:
         """Deserializes the RepoPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', RepoPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", RepoPermissionLevel),
+        )
 
 
 @dataclass
@@ -1556,28 +1798,33 @@ def as_dict(self) -> dict:
         """Serializes the RepoPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsRequest:
         """Deserializes the RepoPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlRequest),
-                   repo_id=d.get('repo_id', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlRequest),
+            repo_id=d.get("repo_id", None),
+        )
 
 
 class ScopeBackendType(Enum):
 
-    AZURE_KEYVAULT = 'AZURE_KEYVAULT'
-    DATABRICKS = 'DATABRICKS'
+    AZURE_KEYVAULT = "AZURE_KEYVAULT"
+    DATABRICKS = "DATABRICKS"
 
 
 @dataclass
@@ -1591,23 +1838,28 @@ class SecretMetadata:
     def as_dict(self) -> dict:
         """Serializes the SecretMetadata into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
+        if self.key is not None:
+            body["key"] = self.key
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
+            body["last_updated_timestamp"] = self.last_updated_timestamp
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
+        if self.key is not None:
+            body["key"] = self.key
         if self.last_updated_timestamp is not None:
-            body['last_updated_timestamp'] = self.last_updated_timestamp
+            body["last_updated_timestamp"] = self.last_updated_timestamp
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretMetadata:
         """Deserializes the SecretMetadata from a dictionary."""
-        return cls(key=d.get('key', None), last_updated_timestamp=d.get('last_updated_timestamp', None))
+        return cls(
+            key=d.get("key", None),
+            last_updated_timestamp=d.get("last_updated_timestamp", None),
+        )
 
 
 @dataclass
@@ -1624,25 +1876,33 @@ class SecretScope:
     def as_dict(self) -> dict:
         """Serializes the SecretScope into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.backend_type is not None: body['backend_type'] = self.backend_type.value
-        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict()
-        if self.name is not None: body['name'] = self.name
+        if self.backend_type is not None:
+            body["backend_type"] = self.backend_type.value
+        if self.keyvault_metadata:
+            body["keyvault_metadata"] = self.keyvault_metadata.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SecretScope into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.backend_type is not None: body['backend_type'] = self.backend_type
-        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata
-        if self.name is not None: body['name'] = self.name
+        if self.backend_type is not None:
+            body["backend_type"] = self.backend_type
+        if self.keyvault_metadata:
+            body["keyvault_metadata"] = self.keyvault_metadata
+        if self.name is not None:
+            body["name"] = self.name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretScope:
         """Deserializes the SecretScope from a dictionary."""
-        return cls(backend_type=_enum(d, 'backend_type', ScopeBackendType),
-                   keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata),
-                   name=d.get('name', None))
+        return cls(
+            backend_type=_enum(d, "backend_type", ScopeBackendType),
+            keyvault_metadata=_from_dict(d, "keyvault_metadata", AzureKeyVaultSecretScopeMetadata),
+            name=d.get("name", None),
+        )
 
 
 @dataclass
@@ -1657,19 +1917,21 @@ class SparseCheckout:
     def as_dict(self) -> dict:
         """Serializes the SparseCheckout into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.patterns: body['patterns'] = [v for v in self.patterns]
+        if self.patterns:
+            body["patterns"] = [v for v in self.patterns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.patterns: body['patterns'] = self.patterns
+        if self.patterns:
+            body["patterns"] = self.patterns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckout:
         """Deserializes the SparseCheckout from a dictionary."""
-        return cls(patterns=d.get('patterns', None))
+        return cls(patterns=d.get("patterns", None))
 
 
 @dataclass
@@ -1684,19 +1946,21 @@ class SparseCheckoutUpdate:
     def as_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.patterns: body['patterns'] = [v for v in self.patterns]
+        if self.patterns:
+            body["patterns"] = [v for v in self.patterns]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.patterns: body['patterns'] = self.patterns
+        if self.patterns:
+            body["patterns"] = self.patterns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate:
         """Deserializes the SparseCheckoutUpdate from a dictionary."""
-        return cls(patterns=d.get('patterns', None))
+        return cls(patterns=d.get("patterns", None))
 
 
 @dataclass
@@ -1725,28 +1989,38 @@ class UpdateCredentialsRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest:
         """Deserializes the UpdateCredentialsRequest from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None),
-                   git_provider=d.get('git_provider', None),
-                   git_username=d.get('git_username', None),
-                   personal_access_token=d.get('personal_access_token', None))
+        return cls(
+            credential_id=d.get("credential_id", None),
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+            personal_access_token=d.get("personal_access_token", None),
+        )
 
 
 @dataclass
@@ -1788,28 +2062,38 @@ class UpdateRepoRequest:
     def as_dict(self) -> dict:
         """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.tag is not None: body['tag'] = self.tag
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.tag is not None:
+            body["tag"] = self.tag
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.tag is not None: body['tag'] = self.tag
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.tag is not None:
+            body["tag"] = self.tag
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest:
         """Deserializes the UpdateRepoRequest from a dictionary."""
-        return cls(branch=d.get('branch', None),
-                   repo_id=d.get('repo_id', None),
-                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate),
-                   tag=d.get('tag', None))
+        return cls(
+            branch=d.get("branch", None),
+            repo_id=d.get("repo_id", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckoutUpdate),
+            tag=d.get("tag", None),
+        )
 
 
 @dataclass
@@ -1848,30 +2132,38 @@ class WorkspaceObjectAccessControlRequest:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlRequest:
         """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None),
-                   permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1894,33 +2186,43 @@ class WorkspaceObjectAccessControlResponse:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
         if self.service_principal_name is not None:
-            body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlResponse:
         """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', WorkspaceObjectPermission),
-                   display_name=d.get('display_name', None),
-                   group_name=d.get('group_name', None),
-                   service_principal_name=d.get('service_principal_name', None),
-                   user_name=d.get('user_name', None))
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", WorkspaceObjectPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
 
 
 @dataclass
@@ -1935,34 +2237,42 @@ class WorkspaceObjectPermission:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermission:
         """Deserializes the WorkspaceObjectPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None),
-                   inherited_from_object=d.get('inherited_from_object', None),
-                   permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel))
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel),
+        )
 
 
 class WorkspaceObjectPermissionLevel(Enum):
     """Permission level"""
 
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_READ = 'CAN_READ'
-    CAN_RUN = 'CAN_RUN'
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_READ = "CAN_READ"
+    CAN_RUN = "CAN_RUN"
 
 
 @dataclass
@@ -1977,26 +2287,32 @@ def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissions:
         """Deserializes the WorkspaceObjectPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      WorkspaceObjectAccessControlResponse),
-                   object_id=d.get('object_id', None),
-                   object_type=d.get('object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
 
 
 @dataclass
@@ -2009,22 +2325,28 @@ class WorkspaceObjectPermissionsDescription:
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsDescription:
         """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None),
-                   permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel))
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel),
+        )
 
 
 @dataclass
@@ -2041,49 +2363,58 @@ def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.access_control_list:
-            body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id
-        if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.workspace_object_id is not None:
+            body["workspace_object_id"] = self.workspace_object_id
+        if self.workspace_object_type is not None:
+            body["workspace_object_type"] = self.workspace_object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id
-        if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.workspace_object_id is not None:
+            body["workspace_object_id"] = self.workspace_object_id
+        if self.workspace_object_type is not None:
+            body["workspace_object_type"] = self.workspace_object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsRequest:
         """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list',
-                                                      WorkspaceObjectAccessControlRequest),
-                   workspace_object_id=d.get('workspace_object_id', None),
-                   workspace_object_type=d.get('workspace_object_type', None))
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlRequest),
+            workspace_object_id=d.get("workspace_object_id", None),
+            workspace_object_type=d.get("workspace_object_type", None),
+        )
 
 
 class GitCredentialsAPI:
     """Registers personal access token for Databricks to do operations on behalf of the user.
-    
+
     See [more info].
-    
-    [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
+
+    [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               git_provider: str,
-               *,
-               git_username: Optional[str] = None,
-               personal_access_token: Optional[str] = None) -> CreateCredentialsResponse:
+    def create(
+        self,
+        git_provider: str,
+        *,
+        git_username: Optional[str] = None,
+        personal_access_token: Optional[str] = None,
+    ) -> CreateCredentialsResponse:
         """Create a credential entry.
-        
+
         Creates a Git credential entry for the user. Only one Git credential per user is supported, so any
         attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update
         existing credentials, or the DELETE endpoint to delete existing credentials.
-        
+
         :param git_provider: str
           Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
           `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
@@ -2097,75 +2428,93 @@ def create(self,
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
           providers, support may exist for other types of scoped access tokens. [Learn more].
-          
+
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-        
+
         :returns: :class:`CreateCredentialsResponse`
         """
         body = {}
-        if git_provider is not None: body['git_provider'] = git_provider
-        if git_username is not None: body['git_username'] = git_username
-        if personal_access_token is not None: body['personal_access_token'] = personal_access_token
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/git-credentials', body=body, headers=headers)
+        if git_provider is not None:
+            body["git_provider"] = git_provider
+        if git_username is not None:
+            body["git_username"] = git_username
+        if personal_access_token is not None:
+            body["personal_access_token"] = personal_access_token
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/git-credentials", body=body, headers=headers)
         return CreateCredentialsResponse.from_dict(res)
 
     def delete(self, credential_id: int):
         """Delete a credential.
-        
+
         Deletes the specified Git credential.
-        
+
         :param credential_id: int
           The ID for the corresponding credential to access.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/git-credentials/{credential_id}",
+            headers=headers,
+        )
 
     def get(self, credential_id: int) -> GetCredentialsResponse:
         """Get a credential entry.
-        
+
         Gets the Git credential with the specified credential ID.
-        
+
         :param credential_id: int
           The ID for the corresponding credential to access.
-        
+
         :returns: :class:`GetCredentialsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/git-credentials/{credential_id}", headers=headers)
         return GetCredentialsResponse.from_dict(res)
 
     def list(self) -> Iterator[CredentialInfo]:
         """Get Git credentials.
-        
+
         Lists the calling user's Git credentials. One credential per user is supported.
-        
+
         :returns: Iterator over :class:`CredentialInfo`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/git-credentials', headers=headers)
+        json = self._api.do("GET", "/api/2.0/git-credentials", headers=headers)
         parsed = ListCredentialsResponse.from_dict(json).credentials
         return parsed if parsed is not None else []
 
-    def update(self,
-               credential_id: int,
-               git_provider: str,
-               *,
-               git_username: Optional[str] = None,
-               personal_access_token: Optional[str] = None):
+    def update(
+        self,
+        credential_id: int,
+        git_provider: str,
+        *,
+        git_username: Optional[str] = None,
+        personal_access_token: Optional[str] = None,
+    ):
         """Update a credential.
-        
+
         Updates the specified Git credential.
-        
+
         :param credential_id: int
           The ID for the corresponding credential to access.
         :param git_provider: str
@@ -2181,45 +2530,59 @@ def update(self,
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
           providers, support may exist for other types of scoped access tokens. [Learn more].
-          
+
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-        
-        
+
+
         """
         body = {}
-        if git_provider is not None: body['git_provider'] = git_provider
-        if git_username is not None: body['git_username'] = git_username
-        if personal_access_token is not None: body['personal_access_token'] = personal_access_token
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('PATCH', f'/api/2.0/git-credentials/{credential_id}', body=body, headers=headers)
+        if git_provider is not None:
+            body["git_provider"] = git_provider
+        if git_username is not None:
+            body["git_username"] = git_username
+        if personal_access_token is not None:
+            body["personal_access_token"] = personal_access_token
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PATCH",
+            f"/api/2.0/git-credentials/{credential_id}",
+            body=body,
+            headers=headers,
+        )
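
For orientation, a minimal usage sketch of the reformatted GitCredentialsAPI, assuming the usual `WorkspaceClient` accessor; the provider, username, and token values below are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Register a personal access token; only one Git credential per user is supported.
    cred = w.git_credentials.create(
        git_provider="gitHub",
        git_username="octocat",  # placeholder username
        personal_access_token="ghp_example",  # placeholder token
    )

    # Rotate the token later via the PATCH endpoint.
    w.git_credentials.update(
        credential_id=cred.credential_id,
        git_provider="gitHub",
        personal_access_token="ghp_rotated",  # placeholder token
    )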
 
 
 class ReposAPI:
     """The Repos API allows users to manage their git repos. Users can use the API to access all repos that they
     have manage permissions on.
-    
+
     Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a
     repository, committing and pushing, pulling, branch management, and visual comparison of diffs when
     committing.
-    
+
     Within Repos you can develop code in notebooks or other files and follow data science and engineering code
-    development best practices using Git for version control, collaboration, and CI/CD."""
+    development best practices using Git for version control, collaboration, and CI/CD.
+    """
 
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               url: str,
-               provider: str,
-               *,
-               path: Optional[str] = None,
-               sparse_checkout: Optional[SparseCheckout] = None) -> CreateRepoResponse:
+    def create(
+        self,
+        url: str,
+        provider: str,
+        *,
+        path: Optional[str] = None,
+        sparse_checkout: Optional[SparseCheckout] = None,
+    ) -> CreateRepoResponse:
         """Create a repo.
-        
+
         Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
         programmatically must be linked to a remote Git repo, unlike repos created in the browser.
-        
+
         :param url: str
           URL of the Git repository to be linked.
         :param provider: str
@@ -2232,91 +2595,112 @@ def create(self,
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.
-        
+
         :returns: :class:`CreateRepoResponse`
         """
         body = {}
-        if path is not None: body['path'] = path
-        if provider is not None: body['provider'] = provider
-        if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict()
-        if url is not None: body['url'] = url
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.0/repos', body=body, headers=headers)
+        if path is not None:
+            body["path"] = path
+        if provider is not None:
+            body["provider"] = provider
+        if sparse_checkout is not None:
+            body["sparse_checkout"] = sparse_checkout.as_dict()
+        if url is not None:
+            body["url"] = url
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/repos", body=body, headers=headers)
         return CreateRepoResponse.from_dict(res)
 
     def delete(self, repo_id: int):
         """Delete a repo.
-        
+
         Deletes the specified repo.
-        
+
         :param repo_id: int
           The ID for the corresponding repo to delete.
-        
-        
+
+
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        self._api.do('DELETE', f'/api/2.0/repos/{repo_id}', headers=headers)
+        self._api.do("DELETE", f"/api/2.0/repos/{repo_id}", headers=headers)
 
     def get(self, repo_id: int) -> GetRepoResponse:
         """Get a repo.
-        
+
         Returns the repo with the given repo ID.
-        
+
         :param repo_id: int
           ID of the Git folder (repo) object in the workspace.
-        
+
         :returns: :class:`GetRepoResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/repos/{repo_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/repos/{repo_id}", headers=headers)
         return GetRepoResponse.from_dict(res)
 
     def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse:
         """Get repo permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param repo_id: str
           The repo for which to get or manage permissions.
-        
+
         :returns: :class:`GetRepoPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/repos/{repo_id}/permissionLevels', headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/repos/{repo_id}/permissionLevels",
+            headers=headers,
+        )
         return GetRepoPermissionLevelsResponse.from_dict(res)
 
     def get_permissions(self, repo_id: str) -> RepoPermissions:
         """Get repo permissions.
-        
+
         Gets the permissions of a repo. Repos can inherit permissions from their root object.
-        
+
         :param repo_id: str
           The repo for which to get or manage permissions.
-        
+
         :returns: :class:`RepoPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', f'/api/2.0/permissions/repos/{repo_id}', headers=headers)
+        res = self._api.do("GET", f"/api/2.0/permissions/repos/{repo_id}", headers=headers)
         return RepoPermissions.from_dict(res)
 
-    def list(self,
-             *,
-             next_page_token: Optional[str] = None,
-             path_prefix: Optional[str] = None) -> Iterator[RepoInfo]:
+    def list(
+        self,
+        *,
+        next_page_token: Optional[str] = None,
+        path_prefix: Optional[str] = None,
+    ) -> Iterator[RepoInfo]:
         """Get repos.
-        
+
         Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
         through additional pages.
-        
+
         :param next_page_token: str (optional)
           Token used to get the next page of results. If not specified, returns the first page of results as
           well as a next page token if there are more results.
@@ -2324,59 +2708,74 @@ def list(self,
           Filters repos that have paths starting with the given path prefix. If not provided, or if the
           provided prefix is effectively empty (`/` or `/Workspace`), Git folders (repos) from
           `/Workspace/Repos` will be served.
-        
+
         :returns: Iterator over :class:`RepoInfo`
         """
 
         query = {}
-        if next_page_token is not None: query['next_page_token'] = next_page_token
-        if path_prefix is not None: query['path_prefix'] = path_prefix
-        headers = {'Accept': 'application/json', }
+        if next_page_token is not None:
+            query["next_page_token"] = next_page_token
+        if path_prefix is not None:
+            query["path_prefix"] = path_prefix
+        headers = {
+            "Accept": "application/json",
+        }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/repos', query=query, headers=headers)
-            if 'repos' in json:
-                for v in json['repos']:
+            json = self._api.do("GET", "/api/2.0/repos", query=query, headers=headers)
+            if "repos" in json:
+                for v in json["repos"]:
                     yield RepoInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
+            if "next_page_token" not in json or not json["next_page_token"]:
                 return
-            query['next_page_token'] = json['next_page_token']
+            query["next_page_token"] = json["next_page_token"]
 
     def set_permissions(
-            self,
-            repo_id: str,
-            *,
-            access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions:
+        self,
+        repo_id: str,
+        *,
+        access_control_list: Optional[List[RepoAccessControlRequest]] = None,
+    ) -> RepoPermissions:
         """Set repo permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their root object.
-        
+
         :param repo_id: str
           The repo for which to get or manage permissions.
         :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-        
+
         :returns: :class:`RepoPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT', f'/api/2.0/permissions/repos/{repo_id}', body=body, headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/repos/{repo_id}",
+            body=body,
+            headers=headers,
+        )
         return RepoPermissions.from_dict(res)
 
-    def update(self,
-               repo_id: int,
-               *,
-               branch: Optional[str] = None,
-               sparse_checkout: Optional[SparseCheckoutUpdate] = None,
-               tag: Optional[str] = None):
+    def update(
+        self,
+        repo_id: int,
+        *,
+        branch: Optional[str] = None,
+        sparse_checkout: Optional[SparseCheckoutUpdate] = None,
+        tag: Optional[str] = None,
+    ):
         """Update a repo.
-        
+
         Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same
         branch.
-        
+
         :param repo_id: int
           ID of the Git folder (repo) object in the workspace.
         :param branch: str (optional)
@@ -2388,48 +2787,63 @@ def update(self,
           Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo
           in a detached HEAD state. Before committing new changes, you must update the repo to a branch
           instead of the detached HEAD.
-        
-        
+
+
         """
         body = {}
-        if branch is not None: body['branch'] = branch
-        if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict()
-        if tag is not None: body['tag'] = tag
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if branch is not None:
+            body["branch"] = branch
+        if sparse_checkout is not None:
+            body["sparse_checkout"] = sparse_checkout.as_dict()
+        if tag is not None:
+            body["tag"] = tag
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('PATCH', f'/api/2.0/repos/{repo_id}', body=body, headers=headers)
+        self._api.do("PATCH", f"/api/2.0/repos/{repo_id}", body=body, headers=headers)
 
     def update_permissions(
-            self,
-            repo_id: str,
-            *,
-            access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions:
+        self,
+        repo_id: str,
+        *,
+        access_control_list: Optional[List[RepoAccessControlRequest]] = None,
+    ) -> RepoPermissions:
         """Update repo permissions.
-        
+
         Updates the permissions on a repo. Repos can inherit permissions from their root object.
-        
+
         :param repo_id: str
           The repo for which to get or manage permissions.
         :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-        
+
         :returns: :class:`RepoPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH', f'/api/2.0/permissions/repos/{repo_id}', body=body, headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/repos/{repo_id}",
+            body=body,
+            headers=headers,
+        )
         return RepoPermissions.from_dict(res)
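
A minimal sketch of the create/update flow in ReposAPI, assuming the usual `WorkspaceClient` accessor; the remote URL and workspace path are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Repos created via the API must be linked to a remote Git repository.
    repo = w.repos.create(
        url="https://github.com/example/project.git",  # placeholder remote
        provider="gitHub",
        path="/Workspace/Repos/someone@example.com/project",  # placeholder path
    )

    # Check out a branch; passing tag= instead puts the repo in a detached HEAD state.
    w.repos.update(repo_id=repo.id, branch="main")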
 
 
 class SecretsAPI:
     """The Secrets API allows you to manage secrets, secret scopes, and access permissions.
-    
+
     Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
     directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
     reference them in notebooks and jobs.
-    
+
     Administrators, secret creators, and users granted permission can read Databricks secrets. While
     Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
     possible to prevent such users from reading secrets."""
@@ -2437,17 +2851,19 @@ class SecretsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create_scope(self,
-                     scope: str,
-                     *,
-                     backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None,
-                     initial_manage_principal: Optional[str] = None,
-                     scope_backend_type: Optional[ScopeBackendType] = None):
+    def create_scope(
+        self,
+        scope: str,
+        *,
+        backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None,
+        initial_manage_principal: Optional[str] = None,
+        scope_backend_type: Optional[ScopeBackendType] = None,
+    ):
         """Create a new secret scope.
-        
+
         The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
         exceed 128 characters.
-        
+
         :param scope: str
           Scope name requested by the user. Scope names are unique.
         :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional)
@@ -2456,269 +2872,323 @@ def create_scope(self,
           The principal that is initially granted `MANAGE` permission to the created scope.
         :param scope_backend_type: :class:`ScopeBackendType` (optional)
           The backend type the scope will be created with. If not specified, will default to `DATABRICKS`
-        
-        
+
+
         """
         body = {}
         if backend_azure_keyvault is not None:
-            body['backend_azure_keyvault'] = backend_azure_keyvault.as_dict()
-        if initial_manage_principal is not None: body['initial_manage_principal'] = initial_manage_principal
-        if scope is not None: body['scope'] = scope
-        if scope_backend_type is not None: body['scope_backend_type'] = scope_backend_type.value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/secrets/scopes/create', body=body, headers=headers)
+            body["backend_azure_keyvault"] = backend_azure_keyvault.as_dict()
+        if initial_manage_principal is not None:
+            body["initial_manage_principal"] = initial_manage_principal
+        if scope is not None:
+            body["scope"] = scope
+        if scope_backend_type is not None:
+            body["scope_backend_type"] = scope_backend_type.value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "POST",
+            "/api/2.0/secrets/scopes/create",
+            body=body,
+            headers=headers,
+        )
 
     def delete_acl(self, scope: str, principal: str):
         """Delete an ACL.
-        
+
         Deletes the given ACL on the given scope.
-        
+
         Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
         such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
         permission to make this API call.
-        
+
         :param scope: str
           The name of the scope to remove permissions from.
         :param principal: str
           The principal to remove an existing ACL from.
-        
-        
+
+
         """
         body = {}
-        if principal is not None: body['principal'] = principal
-        if scope is not None: body['scope'] = scope
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if principal is not None:
+            body["principal"] = principal
+        if scope is not None:
+            body["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/secrets/acls/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/secrets/acls/delete", body=body, headers=headers)
 
     def delete_scope(self, scope: str):
         """Delete a secret scope.
-        
+
         Deletes a secret scope.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
         does not have permission to make this API call.
-        
+
         :param scope: str
           Name of the scope to delete.
-        
-        
+
+
         """
         body = {}
-        if scope is not None: body['scope'] = scope
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if scope is not None:
+            body["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/secrets/scopes/delete', body=body, headers=headers)
+        self._api.do(
+            "POST",
+            "/api/2.0/secrets/scopes/delete",
+            body=body,
+            headers=headers,
+        )
 
     def delete_secret(self, scope: str, key: str):
         """Delete a secret.
-        
+
         Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
         secret scope.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
         if the user does not have permission to make this API call.
-        
+
         :param scope: str
           The name of the scope that contains the secret to delete.
         :param key: str
           Name of the secret to delete.
-        
-        
+
+
         """
         body = {}
-        if key is not None: body['key'] = key
-        if scope is not None: body['scope'] = scope
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if key is not None:
+            body["key"] = key
+        if scope is not None:
+            body["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/secrets/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/secrets/delete", body=body, headers=headers)
 
     def get_acl(self, scope: str, principal: str) -> AclItem:
         """Get secret ACL details.
-        
+
         Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
         permission to invoke this API.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
         user does not have permission to make this API call.
-        
+
         :param scope: str
           The name of the scope to fetch ACL information from.
         :param principal: str
           The principal to fetch ACL information for.
-        
+
         :returns: :class:`AclItem`
         """
 
         query = {}
-        if principal is not None: query['principal'] = principal
-        if scope is not None: query['scope'] = scope
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/secrets/acls/get', query=query, headers=headers)
+        if principal is not None:
+            query["principal"] = principal
+        if scope is not None:
+            query["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/secrets/acls/get", query=query, headers=headers)
         return AclItem.from_dict(res)
 
     def get_secret(self, scope: str, key: str) -> GetSecretResponse:
         """Get a secret.
-        
+
         Gets the bytes representation of a secret value for the specified scope and key.
-        
+
         Users need the READ permission to make this call.
-        
+
         Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the
         caller in DBUtils and the type the data is decoded into.
-        
+
         Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
         ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
-        
+
         :param scope: str
           The name of the scope to fetch secret information from.
         :param key: str
           The key to fetch secret for.
-        
+
         :returns: :class:`GetSecretResponse`
         """
 
         query = {}
-        if key is not None: query['key'] = key
-        if scope is not None: query['scope'] = scope
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/secrets/get', query=query, headers=headers)
+        if key is not None:
+            query["key"] = key
+        if scope is not None:
+            query["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/secrets/get", query=query, headers=headers)
         return GetSecretResponse.from_dict(res)
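
Because the secret is returned as a bytes representation (base64-encoded in the REST response, as assumed below), callers typically decode it themselves; the scope and key names are placeholders:

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    resp = w.secrets.get_secret(scope="my-scope", key="db-password")  # placeholder names
    # Assumption: the value field holds base64-encoded bytes; decode to text if appropriate.
    plaintext = base64.b64decode(resp.value).decode("utf-8")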
 
     def list_acls(self, scope: str) -> Iterator[AclItem]:
         """Lists ACLs.
-        
+
         List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
         user does not have permission to make this API call.
-        
+
         :param scope: str
           The name of the scope to fetch ACL information from.
-        
+
         :returns: Iterator over :class:`AclItem`
         """
 
         query = {}
-        if scope is not None: query['scope'] = scope
-        headers = {'Accept': 'application/json', }
+        if scope is not None:
+            query["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/secrets/acls/list', query=query, headers=headers)
+        json = self._api.do("GET", "/api/2.0/secrets/acls/list", query=query, headers=headers)
         parsed = ListAclsResponse.from_dict(json).items
         return parsed if parsed is not None else []
 
     def list_scopes(self) -> Iterator[SecretScope]:
         """List all scopes.
-        
+
         Lists all secret scopes available in the workspace.
-        
+
         Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
-        
+
         :returns: Iterator over :class:`SecretScope`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/secrets/scopes/list', headers=headers)
+        json = self._api.do("GET", "/api/2.0/secrets/scopes/list", headers=headers)
         parsed = ListScopesResponse.from_dict(json).scopes
         return parsed if parsed is not None else []
 
     def list_secrets(self, scope: str) -> Iterator[SecretMetadata]:
         """List secret keys.
-        
+
         Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
         cannot be retrieved using this API. Users need the READ permission to make this call.
-        
+
         The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
         no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
         this API call.
-        
+
         :param scope: str
           The name of the scope to list secrets within.
-        
+
         :returns: Iterator over :class:`SecretMetadata`
         """
 
         query = {}
-        if scope is not None: query['scope'] = scope
-        headers = {'Accept': 'application/json', }
+        if scope is not None:
+            query["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+        }
 
-        json = self._api.do('GET', '/api/2.0/secrets/list', query=query, headers=headers)
+        json = self._api.do("GET", "/api/2.0/secrets/list", query=query, headers=headers)
         parsed = ListSecretsResponse.from_dict(json).secrets
         return parsed if parsed is not None else []
 
     def put_acl(self, scope: str, principal: str, permission: AclPermission):
         """Create/update an ACL.
-        
+
         Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
         group) on the specified scope point.
-        
+
         In general, a user or group will use the most powerful permission available to them, and permissions
         are ordered as follows:
-        
+
         * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
         read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
         secrets are available.
-        
+
         Note that in general, secret values can only be read from within a command on a cluster (for example,
         through a notebook). There is no API to read the actual secret value material outside of a cluster.
         However, the user's permission will be applied based on who is executing the command, and they must
         have at least READ permission.
-        
+
         Users must have the `MANAGE` permission to invoke this API.
-        
+
         The principal is a user or group name corresponding to an existing Databricks principal to be granted
         or revoked access.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
         permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or
         principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API
         call.
-        
+
         :param scope: str
           The name of the scope to apply permissions to.
         :param principal: str
           The principal in which the permission is applied.
         :param permission: :class:`AclPermission`
           The permission level applied to the principal.
-        
-        
+
+
         """
         body = {}
-        if permission is not None: body['permission'] = permission.value
-        if principal is not None: body['principal'] = principal
-        if scope is not None: body['scope'] = scope
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if permission is not None:
+            body["permission"] = permission.value
+        if principal is not None:
+            body["principal"] = principal
+        if scope is not None:
+            body["scope"] = scope
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/secrets/acls/put', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/secrets/acls/put", body=body, headers=headers)
 
-    def put_secret(self,
-                   scope: str,
-                   key: str,
-                   *,
-                   bytes_value: Optional[str] = None,
-                   string_value: Optional[str] = None):
+    def put_secret(
+        self,
+        scope: str,
+        key: str,
+        *,
+        bytes_value: Optional[str] = None,
+        string_value: Optional[str] = None,
+    ):
         """Add a secret.
-        
+
         Inserts a secret under the provided scope with the given name. If a secret already exists with the
         same name, this command overwrites the existing secret's value. The server encrypts the secret using
         the secret scope's encryption settings before storing it.
-        
+
         You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
         alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
         maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.
-        
+
         The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
         the value returned when the secret value is requested. Exactly one must be specified.
-        
+
         Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
         maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
         value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
         API call.
-        
+
         :param scope: str
           The name of the scope with which the secret will be associated.
         :param key: str
@@ -2727,22 +3197,29 @@ def put_secret(self,
           If specified, value will be stored as bytes.
         :param string_value: str (optional)
           If specified, note that the value will be stored in UTF-8 (MB4) form.
-        
-        
+
+
         """
         body = {}
-        if bytes_value is not None: body['bytes_value'] = bytes_value
-        if key is not None: body['key'] = key
-        if scope is not None: body['scope'] = scope
-        if string_value is not None: body['string_value'] = string_value
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if bytes_value is not None:
+            body["bytes_value"] = bytes_value
+        if key is not None:
+            body["key"] = key
+        if scope is not None:
+            body["scope"] = scope
+        if string_value is not None:
+            body["string_value"] = string_value
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/secrets/put', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/secrets/put", body=body, headers=headers)
 
 
 class WorkspaceAPI:
     """The Workspace API allows you to list, import, export, and delete notebooks and folders.
-    
+
     A notebook is a web-based interface to a document that contains runnable code, visualizations, and
     explanatory text."""
 
@@ -2751,157 +3228,183 @@ def __init__(self, api_client):
 
     def delete(self, path: str, *, recursive: Optional[bool] = None):
         """Delete a workspace object.
-        
+
         Deletes an object or a directory (and optionally recursively deletes all objects in the directory). *
         If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a
         non-empty directory and `recursive` is set to `false`, this call returns an error
         `DIRECTORY_NOT_EMPTY`.
-        
+
         Object deletion cannot be undone and deleting a directory recursively is not atomic.
-        
+
         :param path: str
           The absolute path of the notebook or directory.
         :param recursive: bool (optional)
           The flag that specifies whether to delete the object recursively. It is `false` by default. Please
           note that deleting a directory is not atomic. If it fails in the middle, some of the objects under
           this directory may be deleted and cannot be undone.
-        
-        
+
+
         """
         body = {}
-        if path is not None: body['path'] = path
-        if recursive is not None: body['recursive'] = recursive
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if path is not None:
+            body["path"] = path
+        if recursive is not None:
+            body["recursive"] = recursive
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/workspace/delete', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers)
 
     def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse:
         """Export a workspace object.
-        
+
         Exports an object or the contents of an entire directory.
-        
+
         If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
-        
+
         If the exported data would exceed the size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
         Currently, this API does not support exporting a library.
-        
+
         :param path: str
           The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`,
           `SOURCE`, and `AUTO` formats.
         :param format: :class:`ExportFormat` (optional)
           This specifies the format of the exported file. By default, this is `SOURCE`.
-          
+
           The value is case sensitive.
-          
+
           - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook
           entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported
           as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format.
           Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to
           R Markdown format. - `AUTO`: The object or directory is exported depending on the object's type.
           Directory exports will include notebooks and workspace files.
-        
+
         :returns: :class:`ExportResponse`
         """
 
         query = {}
-        if format is not None: query['format'] = format.value
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', '/api/2.0/workspace/export', query=query, headers=headers)
+        if format is not None:
+            query["format"] = format.value
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/workspace/export", query=query, headers=headers)
         return ExportResponse.from_dict(res)
 
-    def get_permission_levels(self, workspace_object_type: str,
-                              workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse:
+    def get_permission_levels(
+        self, workspace_object_type: str, workspace_object_id: str
+    ) -> GetWorkspaceObjectPermissionLevelsResponse:
         """Get workspace object permission levels.
-        
+
         Gets the permission levels that a user can have on an object.
-        
+
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
         :param workspace_object_id: str
           The workspace object for which to get or manage permissions.
-        
+
         :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
         res = self._api.do(
-            'GET',
-            f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels',
-            headers=headers)
+            "GET",
+            f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels",
+            headers=headers,
+        )
         return GetWorkspaceObjectPermissionLevelsResponse.from_dict(res)
 
-    def get_permissions(self, workspace_object_type: str,
-                        workspace_object_id: str) -> WorkspaceObjectPermissions:
+    def get_permissions(self, workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions:
         """Get workspace object permissions.
-        
+
         Gets the permissions of a workspace object. Workspace objects can inherit permissions from their
         parent objects or root object.
-        
+
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
         :param workspace_object_id: str
           The workspace object for which to get or manage permissions.
-        
+
         :returns: :class:`WorkspaceObjectPermissions`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}',
-                           headers=headers)
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}",
+            headers=headers,
+        )
         return WorkspaceObjectPermissions.from_dict(res)
 
     def get_status(self, path: str) -> ObjectInfo:
         """Get status.
-        
+
         Gets the status of an object or a directory. If `path` does not exist, this call returns an error
         `RESOURCE_DOES_NOT_EXIST`.
-        
+
         :param path: str
           The absolute path of the notebook or directory.
-        
+
         :returns: :class:`ObjectInfo`
         """
 
         query = {}
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
 
-        res = self._api.do('GET', '/api/2.0/workspace/get-status', query=query, headers=headers)
+        res = self._api.do(
+            "GET",
+            "/api/2.0/workspace/get-status",
+            query=query,
+            headers=headers,
+        )
         return ObjectInfo.from_dict(res)
 
-    def import_(self,
-                path: str,
-                *,
-                content: Optional[str] = None,
-                format: Optional[ImportFormat] = None,
-                language: Optional[Language] = None,
-                overwrite: Optional[bool] = None):
+    def import_(
+        self,
+        path: str,
+        *,
+        content: Optional[str] = None,
+        format: Optional[ImportFormat] = None,
+        language: Optional[Language] = None,
+        overwrite: Optional[bool] = None,
+    ):
         """Import a workspace object.
-        
+
         Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
         If `path` already exists and `overwrite` is set to `false`, this call returns an error
         `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE`
         format with the `language` field unset. To import a single file as `SOURCE`, you must set the
         `language` field.
-        
+
         :param path: str
           The absolute path of the object or directory. Importing a directory is only supported for the `DBC`
           and `SOURCE` formats.
         :param content: str (optional)
           The base64-encoded content. This has a limit of 10 MB.
-          
+
           If the limit (10MB) is exceeded, an exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
           This parameter might be absent, and instead a posted file is used.
         :param format: :class:`ImportFormat` (optional)
           This specifies the format of the file to be imported.
-          
+
           The value is case sensitive.
-          
+
           - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
           content provided in the request. If the item is imported as a notebook, then the item's extension is
           automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`:
@@ -2913,94 +3416,115 @@ def import_(self,
         :param overwrite: bool (optional)
           The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC`
           format, `overwrite` is not supported since it may contain a directory.
-        
-        
+
+
         """
         body = {}
-        if content is not None: body['content'] = content
-        if format is not None: body['format'] = format.value
-        if language is not None: body['language'] = language.value
-        if overwrite is not None: body['overwrite'] = overwrite
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        self._api.do('POST', '/api/2.0/workspace/import', body=body, headers=headers)
+        if content is not None:
+            body["content"] = content
+        if format is not None:
+            body["format"] = format.value
+        if language is not None:
+            body["language"] = language.value
+        if overwrite is not None:
+            body["overwrite"] = overwrite
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do("POST", "/api/2.0/workspace/import", body=body, headers=headers)
 
     def list(self, path: str, *, notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]:
         """List contents.
-        
+
         Lists the contents of a directory, or the object if it is not a directory. If the input path does not
         exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
-        
+
         :param path: str
           The absolute path of the notebook or directory.
         :param notebooks_modified_after: int (optional)
           UTC timestamp in milliseconds
-        
+
         :returns: Iterator over :class:`ObjectInfo`
         """
 
         query = {}
-        if notebooks_modified_after is not None: query['notebooks_modified_after'] = notebooks_modified_after
-        if path is not None: query['path'] = path
-        headers = {'Accept': 'application/json', }
-
-        json = self._api.do('GET', '/api/2.0/workspace/list', query=query, headers=headers)
+        if notebooks_modified_after is not None:
+            query["notebooks_modified_after"] = notebooks_modified_after
+        if path is not None:
+            query["path"] = path
+        headers = {
+            "Accept": "application/json",
+        }
+
+        json = self._api.do("GET", "/api/2.0/workspace/list", query=query, headers=headers)
         parsed = ListResponse.from_dict(json).objects
         return parsed if parsed is not None else []
 
     def mkdirs(self, path: str):
         """Create a directory.
-        
+
         Creates the specified directory (and necessary parent directories if they do not exist). If there is
         an object (not a directory) at any prefix of the input path, this call returns an error
         `RESOURCE_ALREADY_EXISTS`.
-        
+
         Note that if this operation fails it may have succeeded in creating some of the necessary parent
         directories.
-        
+
         :param path: str
           The absolute path of the directory. If the parent directories do not exist, it will also create
           them. If the directory already exists, this command will do nothing and succeed.
-        
-        
+
+
         """
         body = {}
-        if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        if path is not None:
+            body["path"] = path
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        self._api.do('POST', '/api/2.0/workspace/mkdirs', body=body, headers=headers)
+        self._api.do("POST", "/api/2.0/workspace/mkdirs", body=body, headers=headers)
 
     def set_permissions(
         self,
         workspace_object_type: str,
         workspace_object_id: str,
         *,
-        access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None
+        access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None,
     ) -> WorkspaceObjectPermissions:
         """Set workspace object permissions.
-        
+
         Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
         permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
-        
+
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
         :param workspace_object_id: str
           The workspace object for which to get or manage permissions.
         :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-        
+
         :returns: :class:`WorkspaceObjectPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PUT',
-                           f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PUT",
+            f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}",
+            body=body,
+            headers=headers,
+        )
         return WorkspaceObjectPermissions.from_dict(res)
 
     def update_permissions(
@@ -3008,28 +3532,33 @@ def update_permissions(
         workspace_object_type: str,
         workspace_object_id: str,
         *,
-        access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None
+        access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None,
     ) -> WorkspaceObjectPermissions:
         """Update workspace object permissions.
-        
+
         Updates the permissions on a workspace object. Workspace objects can inherit permissions from their
         parent objects or root object.
-        
+
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
         :param workspace_object_id: str
           The workspace object for which to get or manage permissions.
         :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-        
+
         :returns: :class:`WorkspaceObjectPermissions`
         """
         body = {}
         if access_control_list is not None:
-            body['access_control_list'] = [v.as_dict() for v in access_control_list]
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+            body["access_control_list"] = [v.as_dict() for v in access_control_list]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
 
-        res = self._api.do('PATCH',
-                           f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}',
-                           body=body,
-                           headers=headers)
+        res = self._api.do(
+            "PATCH",
+            f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}",
+            body=body,
+            headers=headers,
+        )
         return WorkspaceObjectPermissions.from_dict(res)
diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py
index 45adfe51d..cca5344c5 100644
--- a/databricks/sdk/useragent.py
+++ b/databricks/sdk/useragent.py
@@ -8,9 +8,9 @@
 from .version import __version__
 
 # Constants
-RUNTIME_KEY = 'runtime'
-CICD_KEY = 'cicd'
-AUTH_KEY = 'auth'
+RUNTIME_KEY = "runtime"
+CICD_KEY = "cicd"
+AUTH_KEY = "auth"
 
 _product_name = "unknown"
 _product_version = "0.0.0"
@@ -20,15 +20,17 @@
 _extra = []
 
 # Precompiled regex patterns
-alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
+alphanum_pattern = re.compile(r"^[a-zA-Z0-9_.+-]+$")
 
 # official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
 # with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
-semver_pattern = re.compile(r"^"
-                            r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
-                            r"(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
-                            r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
-                            r"(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
+semver_pattern = re.compile(
+    r"^"
+    r"(?P0|[1-9]\d*)\.(?Px|0|[1-9]\d*)(\.(?Px|0|[1-9x]\d*))?"
+    r"(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+    r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+    r"(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
+)
 
 
 def _match_alphanum(value):
@@ -56,7 +58,7 @@ def with_product(name: str, version: str):
     global _product_name, _product_version
     _match_alphanum(name)
     _match_semver(version)
-    logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}')
+    logger.debug(f"Changing product from {_product_name}/{_product_version} to {name}/{version}")
     _product_name = name
     _product_version = version
 
@@ -80,7 +82,7 @@ def with_extra(key: str, value: str):
     global _extra
     _match_alphanum(key)
     _match_alphanum_or_semver(value)
-    logger.debug(f'Adding {key}/{value} to User-Agent')
+    logger.debug(f"Adding {key}/{value} to User-Agent")
     _extra.append((key, value))
 
 
@@ -114,22 +116,24 @@ def _get_upstream_user_agent_info() -> List[Tuple[str, str]]:
 
 def _get_runtime_info() -> List[Tuple[str, str]]:
     """[INTERNAL API] Return the runtime version if running on Databricks."""
-    if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
-        runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION']
-        if runtime_version != '':
+    if "DATABRICKS_RUNTIME_VERSION" in os.environ:
+        runtime_version = os.environ["DATABRICKS_RUNTIME_VERSION"]
+        if runtime_version != "":
             runtime_version = _sanitize_header_value(runtime_version)
-            return [('runtime', runtime_version)]
+            return [("runtime", runtime_version)]
     return []
 
 
 def _sanitize_header_value(value: str) -> str:
-    value = value.replace(' ', '-')
-    value = value.replace('/', '-')
+    value = value.replace(" ", "-")
+    value = value.replace("/", "-")
     return value
 
 
-def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
-              other_info: Optional[List[Tuple[str, str]]] = None) -> str:
+def to_string(
+    alternate_product_info: Optional[Tuple[str, str]] = None,
+    other_info: Optional[List[Tuple[str, str]]] = None,
+) -> str:
     """Compute the full User-Agent header.
 
     The User-Agent header contains the product name, version, and other metadata that is submitted to Databricks on
@@ -141,8 +145,13 @@ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
         base.append(alternate_product_info)
     else:
         base.append((_product_name, _product_version))
-    base.extend([("databricks-sdk-py", __version__), ("python", platform.python_version()),
-                 ("os", platform.uname().system.lower()), ])
+    base.extend(
+        [
+            ("databricks-sdk-py", __version__),
+            ("python", platform.python_version()),
+            ("os", platform.uname().system.lower()),
+        ]
+    )
     if other_info:
         base.extend(other_info)
     base.extend(_extra)
@@ -162,7 +171,12 @@ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
     "circle": [("CIRCLECI", "true")],
     "travis": [("TRAVIS", "true")],
     "bitbucket": [("BITBUCKET_BUILD_NUMBER", "")],
-    "google-cloud-build": [("PROJECT_ID", ""), ("BUILD_ID", ""), ("PROJECT_NUMBER", ""), ("LOCATION", "")],
+    "google-cloud-build": [
+        ("PROJECT_ID", ""),
+        ("BUILD_ID", ""),
+        ("PROJECT_NUMBER", ""),
+        ("LOCATION", ""),
+    ],
     "aws-code-build": [("CODEBUILD_BUILD_ARN", "")],
     "tf-cloud": [("TFC_RUN_ID", "")],
 }
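
A minimal usage sketch of the helpers reformatted above (illustrative only, not part of the patch): with_product and with_extra validate their inputs against alphanum_pattern and semver_pattern, and to_string stitches everything into the final header. The printed value is a guess at the typical shape; concrete fields vary by environment, and extra segments (runtime, cicd, auth) may be appended.

from databricks.sdk import useragent

assert useragent.semver_pattern.match("2.x")         # "x" wildcard for the minor version, patch omitted
assert useragent.semver_pattern.match("1.2.3-rc.1")  # standard semver pre-release

useragent.with_product("my-tool", "1.2.0")   # must pass alphanum + semver validation
useragent.with_extra("feature", "budgets")   # appended to every header as "feature/budgets"
header = useragent.to_string()
# Expected shape (values vary by environment):
# "my-tool/1.2.0 databricks-sdk-py/0.44.1 python/3.12.1 os/linux feature/budgets"
print(header)
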
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index c09c695fd..7f532dc4a 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.44.1'
+__version__ = "0.44.1"
diff --git a/examples/account/budgets/create_budgets.py b/examples/account/budgets/create_budgets.py
index 030cc8a57..8f7cd5876 100755
--- a/examples/account/budgets/create_budgets.py
+++ b/examples/account/budgets/create_budgets.py
@@ -5,26 +5,36 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
-    display_name=f'sdk-{time.time_ns()}',
-    filter=billing.BudgetConfigurationFilter(tags=[
-        billing.BudgetConfigurationFilterTagClause(key="tagName",
-                                                   value=billing.BudgetConfigurationFilterClause(
-                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
-                                                       values=["all"]))
-    ]),
-    alert_configurations=[
-        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
-            time_period=billing.AlertConfigurationTimePeriod.MONTH,
-            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
-            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
-            quantity_threshold="100",
-            action_configurations=[
-                billing.CreateBudgetConfigurationBudgetActionConfigurations(
-                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
-                    target="admin@example.com")
-            ])
-    ]))
+created = a.budgets.create(
+    budget=billing.CreateBudgetConfigurationBudget(
+        display_name=f"sdk-{time.time_ns()}",
+        filter=billing.BudgetConfigurationFilter(
+            tags=[
+                billing.BudgetConfigurationFilterTagClause(
+                    key="tagName",
+                    value=billing.BudgetConfigurationFilterClause(
+                        operator=billing.BudgetConfigurationFilterOperator.IN,
+                        values=["all"],
+                    ),
+                )
+            ]
+        ),
+        alert_configurations=[
+            billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                quantity_threshold="100",
+                action_configurations=[
+                    billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                        action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                        target="admin@example.com",
+                    )
+                ],
+            )
+        ],
+    )
+)
 
 # cleanup
 a.budgets.delete(budget_id=created.budget.budget_configuration_id)
diff --git a/examples/account/budgets/get_budgets.py b/examples/account/budgets/get_budgets.py
index 9c2973110..9144a5921 100755
--- a/examples/account/budgets/get_budgets.py
+++ b/examples/account/budgets/get_budgets.py
@@ -5,26 +5,36 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
-    display_name=f'sdk-{time.time_ns()}',
-    filter=billing.BudgetConfigurationFilter(tags=[
-        billing.BudgetConfigurationFilterTagClause(key="tagName",
-                                                   value=billing.BudgetConfigurationFilterClause(
-                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
-                                                       values=["all"]))
-    ]),
-    alert_configurations=[
-        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
-            time_period=billing.AlertConfigurationTimePeriod.MONTH,
-            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
-            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
-            quantity_threshold="100",
-            action_configurations=[
-                billing.CreateBudgetConfigurationBudgetActionConfigurations(
-                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
-                    target="admin@example.com")
-            ])
-    ]))
+created = a.budgets.create(
+    budget=billing.CreateBudgetConfigurationBudget(
+        display_name=f"sdk-{time.time_ns()}",
+        filter=billing.BudgetConfigurationFilter(
+            tags=[
+                billing.BudgetConfigurationFilterTagClause(
+                    key="tagName",
+                    value=billing.BudgetConfigurationFilterClause(
+                        operator=billing.BudgetConfigurationFilterOperator.IN,
+                        values=["all"],
+                    ),
+                )
+            ]
+        ),
+        alert_configurations=[
+            billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                quantity_threshold="100",
+                action_configurations=[
+                    billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                        action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                        target="admin@example.com",
+                    )
+                ],
+            )
+        ],
+    )
+)
 
 by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id)
 
diff --git a/examples/account/budgets/update_budgets.py b/examples/account/budgets/update_budgets.py
index 399770058..d19786194 100755
--- a/examples/account/budgets/update_budgets.py
+++ b/examples/account/budgets/update_budgets.py
@@ -5,38 +5,53 @@
 
 a = AccountClient()
 
-created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget(
-    display_name=f'sdk-{time.time_ns()}',
-    filter=billing.BudgetConfigurationFilter(tags=[
-        billing.BudgetConfigurationFilterTagClause(key="tagName",
-                                                   value=billing.BudgetConfigurationFilterClause(
-                                                       operator=billing.BudgetConfigurationFilterOperator.IN,
-                                                       values=["all"]))
-    ]),
-    alert_configurations=[
-        billing.CreateBudgetConfigurationBudgetAlertConfigurations(
-            time_period=billing.AlertConfigurationTimePeriod.MONTH,
-            quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
-            trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
-            quantity_threshold="100",
-            action_configurations=[
-                billing.CreateBudgetConfigurationBudgetActionConfigurations(
-                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
-                    target="admin@example.com")
-            ])
-    ]))
+created = a.budgets.create(
+    budget=billing.CreateBudgetConfigurationBudget(
+        display_name=f"sdk-{time.time_ns()}",
+        filter=billing.BudgetConfigurationFilter(
+            tags=[
+                billing.BudgetConfigurationFilterTagClause(
+                    key="tagName",
+                    value=billing.BudgetConfigurationFilterClause(
+                        operator=billing.BudgetConfigurationFilterOperator.IN,
+                        values=["all"],
+                    ),
+                )
+            ]
+        ),
+        alert_configurations=[
+            billing.CreateBudgetConfigurationBudgetAlertConfigurations(
+                time_period=billing.AlertConfigurationTimePeriod.MONTH,
+                quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
+                trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
+                quantity_threshold="100",
+                action_configurations=[
+                    billing.CreateBudgetConfigurationBudgetActionConfigurations(
+                        action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
+                        target="admin@example.com",
+                    )
+                ],
+            )
+        ],
+    )
+)
 
 _ = a.budgets.update(
     budget_id=created.budget.budget_configuration_id,
     budget=billing.UpdateBudgetConfigurationBudget(
         budget_configuration_id=created.budget.budget_configuration_id,
-        display_name=f'sdk-{time.time_ns()}',
-        filter=billing.BudgetConfigurationFilter(tags=[
-            billing.BudgetConfigurationFilterTagClause(
-                key="tagName",
-                value=billing.BudgetConfigurationFilterClause(
-                    operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"]))
-        ]),
+        display_name=f"sdk-{time.time_ns()}",
+        filter=billing.BudgetConfigurationFilter(
+            tags=[
+                billing.BudgetConfigurationFilterTagClause(
+                    key="tagName",
+                    value=billing.BudgetConfigurationFilterClause(
+                        operator=billing.BudgetConfigurationFilterOperator.IN,
+                        values=["all"],
+                    ),
+                )
+            ]
+        ),
         alert_configurations=[
             billing.AlertConfiguration(
                 alert_configuration_id=created.budget.alert_configurations[0].alert_configuration_id,
@@ -44,8 +59,11 @@
                 quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
                 trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
                 quantity_threshold="50",
-                action_configurations=created.budget.alert_configurations[0].action_configurations)
-        ]))
+                action_configurations=created.budget.alert_configurations[0].action_configurations,
+            )
+        ],
+    ),
+)
 
 # cleanup
 a.budgets.delete(budget_id=created.budget.budget_configuration_id)
diff --git a/examples/account/credentials/create_credentials.py b/examples/account/credentials/create_credentials.py
index 9885467c5..8c2b6084f 100755
--- a/examples/account/credentials/create_credentials.py
+++ b/examples/account/credentials/create_credentials.py
@@ -7,9 +7,11 @@
 a = AccountClient()
 
 role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+    ),
+)
 
 # cleanup
 a.credentials.delete(credentials_id=role.credentials_id)
diff --git a/examples/account/credentials/create_log_delivery.py b/examples/account/credentials/create_log_delivery.py
index 28b521cd6..8971441f9 100755
--- a/examples/account/credentials/create_log_delivery.py
+++ b/examples/account/credentials/create_log_delivery.py
@@ -7,9 +7,11 @@
 a = AccountClient()
 
 creds = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_LOGDELIVERY_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+    ),
+)
 
 # cleanup
 a.credentials.delete(credentials_id=creds.credentials_id)
diff --git a/examples/account/credentials/create_workspaces.py b/examples/account/credentials/create_workspaces.py
index 9885467c5..8c2b6084f 100755
--- a/examples/account/credentials/create_workspaces.py
+++ b/examples/account/credentials/create_workspaces.py
@@ -7,9 +7,11 @@
 a = AccountClient()
 
 role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+    ),
+)
 
 # cleanup
 a.credentials.delete(credentials_id=role.credentials_id)
diff --git a/examples/account/credentials/get_credentials.py b/examples/account/credentials/get_credentials.py
index 847fc70ba..b5d8be457 100755
--- a/examples/account/credentials/get_credentials.py
+++ b/examples/account/credentials/get_credentials.py
@@ -7,9 +7,11 @@
 a = AccountClient()
 
 role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+    ),
+)
 
 by_id = a.credentials.get(credentials_id=role.credentials_id)
 
diff --git a/examples/account/encryption_keys/create_encryption_keys.py b/examples/account/encryption_keys/create_encryption_keys.py
index 83201e135..eaa6846ad 100755
--- a/examples/account/encryption_keys/create_encryption_keys.py
+++ b/examples/account/encryption_keys/create_encryption_keys.py
@@ -5,9 +5,13 @@
 
 a = AccountClient()
 
-created = a.encryption_keys.create(aws_key_info=provisioning.CreateAwsKeyInfo(
-    key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"], key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"]),
-                                   use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES])
+created = a.encryption_keys.create(
+    aws_key_info=provisioning.CreateAwsKeyInfo(
+        key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"],
+        key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"],
+    ),
+    use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES],
+)
 
 # cleanup
 a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id)
diff --git a/examples/account/encryption_keys/get_encryption_keys.py b/examples/account/encryption_keys/get_encryption_keys.py
index 9b325dc2c..922ed327d 100755
--- a/examples/account/encryption_keys/get_encryption_keys.py
+++ b/examples/account/encryption_keys/get_encryption_keys.py
@@ -5,9 +5,13 @@
 
 a = AccountClient()
 
-created = a.encryption_keys.create(aws_key_info=provisioning.CreateAwsKeyInfo(
-    key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"], key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"]),
-                                   use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES])
+created = a.encryption_keys.create(
+    aws_key_info=provisioning.CreateAwsKeyInfo(
+        key_arn=os.environ["TEST_MANAGED_KMS_KEY_ARN"],
+        key_alias=os.environ["TEST_STORAGE_KMS_KEY_ALIAS"],
+    ),
+    use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES],
+)
 
 by_id = a.encryption_keys.get(customer_managed_key_id=created.customer_managed_key_id)
 
diff --git a/examples/account/log_delivery/create_log_delivery.py b/examples/account/log_delivery/create_log_delivery.py
index 36edc03a1..d1dfc1776 100755
--- a/examples/account/log_delivery/create_log_delivery.py
+++ b/examples/account/log_delivery/create_log_delivery.py
@@ -6,23 +6,32 @@
 
 a = AccountClient()
 
-bucket = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}',
-                          root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}'))
+bucket = a.storage.create(
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+)
 
 creds = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_LOGDELIVERY_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+    ),
+)
 
-created = a.log_delivery.create(log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
-    config_name=f'sdk-{time.time_ns()}',
-    credentials_id=creds.credentials_id,
-    storage_configuration_id=bucket.storage_configuration_id,
-    log_type=billing.LogType.AUDIT_LOGS,
-    output_format=billing.OutputFormat.JSON))
+created = a.log_delivery.create(
+    log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
+        config_name=f"sdk-{time.time_ns()}",
+        credentials_id=creds.credentials_id,
+        storage_configuration_id=bucket.storage_configuration_id,
+        log_type=billing.LogType.AUDIT_LOGS,
+        output_format=billing.OutputFormat.JSON,
+    )
+)
 
 # cleanup
 a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
 a.credentials.delete(credentials_id=creds.credentials_id)
-a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id,
-                            status=billing.LogDeliveryConfigStatus.DISABLED)
+a.log_delivery.patch_status(
+    log_delivery_configuration_id=created.log_delivery_configuration.config_id,
+    status=billing.LogDeliveryConfigStatus.DISABLED,
+)
diff --git a/examples/account/log_delivery/get_log_delivery.py b/examples/account/log_delivery/get_log_delivery.py
index af3f61927..1b8043c07 100755
--- a/examples/account/log_delivery/get_log_delivery.py
+++ b/examples/account/log_delivery/get_log_delivery.py
@@ -6,25 +6,34 @@
 
 a = AccountClient()
 
-bucket = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}',
-                          root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}'))
+bucket = a.storage.create(
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+)
 
 creds = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_LOGDELIVERY_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
+    ),
+)
 
-created = a.log_delivery.create(log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
-    config_name=f'sdk-{time.time_ns()}',
-    credentials_id=creds.credentials_id,
-    storage_configuration_id=bucket.storage_configuration_id,
-    log_type=billing.LogType.AUDIT_LOGS,
-    output_format=billing.OutputFormat.JSON))
+created = a.log_delivery.create(
+    log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
+        config_name=f"sdk-{time.time_ns()}",
+        credentials_id=creds.credentials_id,
+        storage_configuration_id=bucket.storage_configuration_id,
+        log_type=billing.LogType.AUDIT_LOGS,
+        output_format=billing.OutputFormat.JSON,
+    )
+)
 
 by_id = a.log_delivery.get(log_delivery_configuration_id=created.log_delivery_configuration.config_id)
 
 # cleanup
 a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
 a.credentials.delete(credentials_id=creds.credentials_id)
-a.log_delivery.patch_status(log_delivery_configuration_id=created.log_delivery_configuration.config_id,
-                            status=billing.LogDeliveryConfigStatus.DISABLED)
+a.log_delivery.patch_status(
+    log_delivery_configuration_id=created.log_delivery_configuration.config_id,
+    status=billing.LogDeliveryConfigStatus.DISABLED,
+)
diff --git a/examples/account/networks/create_networks.py b/examples/account/networks/create_networks.py
index e0f1a984a..8dd7e4a76 100755
--- a/examples/account/networks/create_networks.py
+++ b/examples/account/networks/create_networks.py
@@ -4,8 +4,9 @@
 
 a = AccountClient()
 
-netw = a.networks.create(network_name=f'sdk-{time.time_ns()}',
-                         vpc_id=hex(time.time_ns())[2:],
-                         subnet_ids=[hex(time.time_ns())[2:],
-                                     hex(time.time_ns())[2:]],
-                         security_group_ids=[hex(time.time_ns())[2:]])
+netw = a.networks.create(
+    network_name=f"sdk-{time.time_ns()}",
+    vpc_id=hex(time.time_ns())[2:],
+    subnet_ids=[hex(time.time_ns())[2:], hex(time.time_ns())[2:]],
+    security_group_ids=[hex(time.time_ns())[2:]],
+)
diff --git a/examples/account/networks/get_networks.py b/examples/account/networks/get_networks.py
index 4ee37c814..395d39b02 100755
--- a/examples/account/networks/get_networks.py
+++ b/examples/account/networks/get_networks.py
@@ -4,10 +4,11 @@
 
 a = AccountClient()
 
-netw = a.networks.create(network_name=f'sdk-{time.time_ns()}',
-                         vpc_id=hex(time.time_ns())[2:],
-                         subnet_ids=[hex(time.time_ns())[2:],
-                                     hex(time.time_ns())[2:]],
-                         security_group_ids=[hex(time.time_ns())[2:]])
+netw = a.networks.create(
+    network_name=f"sdk-{time.time_ns()}",
+    vpc_id=hex(time.time_ns())[2:],
+    subnet_ids=[hex(time.time_ns())[2:], hex(time.time_ns())[2:]],
+    security_group_ids=[hex(time.time_ns())[2:]],
+)
 
 by_id = a.networks.get(network_id=netw.network_id)
diff --git a/examples/account/private_access/create_private_access.py b/examples/account/private_access/create_private_access.py
index 5cf99643c..0e079f587 100755
--- a/examples/account/private_access/create_private_access.py
+++ b/examples/account/private_access/create_private_access.py
@@ -5,8 +5,10 @@
 
 a = AccountClient()
 
-created = a.private_access.create(private_access_settings_name=f'sdk-{time.time_ns()}',
-                                  region=os.environ["AWS_REGION"])
+created = a.private_access.create(
+    private_access_settings_name=f"sdk-{time.time_ns()}",
+    region=os.environ["AWS_REGION"],
+)
 
 # cleanup
 a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
diff --git a/examples/account/private_access/get_private_access.py b/examples/account/private_access/get_private_access.py
index 2c4eed208..ccc945483 100755
--- a/examples/account/private_access/get_private_access.py
+++ b/examples/account/private_access/get_private_access.py
@@ -5,8 +5,10 @@
 
 a = AccountClient()
 
-created = a.private_access.create(private_access_settings_name=f'sdk-{time.time_ns()}',
-                                  region=os.environ["AWS_REGION"])
+created = a.private_access.create(
+    private_access_settings_name=f"sdk-{time.time_ns()}",
+    region=os.environ["AWS_REGION"],
+)
 
 by_id = a.private_access.get(private_access_settings_id=created.private_access_settings_id)
 
diff --git a/examples/account/private_access/replace_private_access.py b/examples/account/private_access/replace_private_access.py
index 8cb6f2faf..01ee1fc32 100755
--- a/examples/account/private_access/replace_private_access.py
+++ b/examples/account/private_access/replace_private_access.py
@@ -5,12 +5,16 @@
 
 a = AccountClient()
 
-created = a.private_access.create(private_access_settings_name=f'sdk-{time.time_ns()}',
-                                  region=os.environ["AWS_REGION"])
+created = a.private_access.create(
+    private_access_settings_name=f"sdk-{time.time_ns()}",
+    region=os.environ["AWS_REGION"],
+)
 
-a.private_access.replace(private_access_settings_id=created.private_access_settings_id,
-                         private_access_settings_name=f'sdk-{time.time_ns()}',
-                         region=os.environ["AWS_REGION"])
+a.private_access.replace(
+    private_access_settings_id=created.private_access_settings_id,
+    private_access_settings_name=f"sdk-{time.time_ns()}",
+    region=os.environ["AWS_REGION"],
+)
 
 # cleanup
 a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
diff --git a/examples/account/service_principals/create_account_service_principal.py b/examples/account/service_principals/create_account_service_principal.py
index 55fe3ae47..c639d1645 100755
--- a/examples/account/service_principals/create_account_service_principal.py
+++ b/examples/account/service_principals/create_account_service_principal.py
@@ -4,7 +4,7 @@
 
 a = AccountClient()
 
-sp_create = a.service_principals.create(active=True, display_name=f'sdk-{time.time_ns()}')
+sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 a.service_principals.delete(id=sp_create.id)
diff --git a/examples/account/service_principals/create_workspace_assignment.py b/examples/account/service_principals/create_workspace_assignment.py
index 86ffe951d..c3f8010b7 100755
--- a/examples/account/service_principals/create_workspace_assignment.py
+++ b/examples/account/service_principals/create_workspace_assignment.py
@@ -4,4 +4,4 @@
 
 a = AccountClient()
 
-spn = a.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}")
diff --git a/examples/account/service_principals/create_workspace_assignment_on_aws.py b/examples/account/service_principals/create_workspace_assignment_on_aws.py
index 86ffe951d..c3f8010b7 100755
--- a/examples/account/service_principals/create_workspace_assignment_on_aws.py
+++ b/examples/account/service_principals/create_workspace_assignment_on_aws.py
@@ -4,4 +4,4 @@
 
 a = AccountClient()
 
-spn = a.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}")
diff --git a/examples/account/service_principals/get_account_service_principal.py b/examples/account/service_principals/get_account_service_principal.py
index 313b6ac8c..8aef58193 100755
--- a/examples/account/service_principals/get_account_service_principal.py
+++ b/examples/account/service_principals/get_account_service_principal.py
@@ -4,7 +4,7 @@
 
 a = AccountClient()
 
-sp_create = a.service_principals.create(active=True, display_name=f'sdk-{time.time_ns()}')
+sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}")
 
 sp = a.service_principals.get(id=sp_create.id)
 
diff --git a/examples/account/service_principals/list_account_service_principal.py b/examples/account/service_principals/list_account_service_principal.py
index 15c9b6ed7..0ddcf6f0a 100755
--- a/examples/account/service_principals/list_account_service_principal.py
+++ b/examples/account/service_principals/list_account_service_principal.py
@@ -4,7 +4,7 @@
 
 a = AccountClient()
 
-sp_create = a.service_principals.create(active=True, display_name=f'sdk-{time.time_ns()}')
+sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}")
 
 sp = a.service_principals.get(id=sp_create.id)
 
diff --git a/examples/account/service_principals/patch_account_service_principal.py b/examples/account/service_principals/patch_account_service_principal.py
index cb4a7c5fe..e70940875 100755
--- a/examples/account/service_principals/patch_account_service_principal.py
+++ b/examples/account/service_principals/patch_account_service_principal.py
@@ -5,13 +5,15 @@
 
 a = AccountClient()
 
-sp_create = a.service_principals.create(active=True, display_name=f'sdk-{time.time_ns()}')
+sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}")
 
 sp = a.service_principals.get(id=sp_create.id)
 
-a.service_principals.patch(id=sp.id,
-                           operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
-                           schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
+a.service_principals.patch(
+    id=sp.id,
+    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
+    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
+)
 
 # cleanup
 a.service_principals.delete(id=sp_create.id)
diff --git a/examples/account/service_principals/update_account_service_principal.py b/examples/account/service_principals/update_account_service_principal.py
index 116f31c4c..62e774049 100755
--- a/examples/account/service_principals/update_account_service_principal.py
+++ b/examples/account/service_principals/update_account_service_principal.py
@@ -4,7 +4,7 @@
 
 a = AccountClient()
 
-sp_create = a.service_principals.create(active=True, display_name=f'sdk-{time.time_ns()}')
+sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}")
 
 sp = a.service_principals.get(id=sp_create.id)
 
diff --git a/examples/account/storage/create_log_delivery.py b/examples/account/storage/create_log_delivery.py
index 466b944ea..75671c75e 100755
--- a/examples/account/storage/create_log_delivery.py
+++ b/examples/account/storage/create_log_delivery.py
@@ -5,8 +5,10 @@
 
 a = AccountClient()
 
-bucket = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}',
-                          root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}'))
+bucket = a.storage.create(
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+)
 
 # cleanup
 a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)
diff --git a/examples/account/storage/create_storage.py b/examples/account/storage/create_storage.py
index c1e010868..19ea7b7b9 100755
--- a/examples/account/storage/create_storage.py
+++ b/examples/account/storage/create_storage.py
@@ -5,5 +5,7 @@
 
 a = AccountClient()
 
-storage = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}',
-                           root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}'))
+storage = a.storage.create(
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+)
diff --git a/examples/account/storage/create_workspaces.py b/examples/account/storage/create_workspaces.py
index e8c3bb4e5..297ea8524 100755
--- a/examples/account/storage/create_workspaces.py
+++ b/examples/account/storage/create_workspaces.py
@@ -7,8 +7,9 @@
 a = AccountClient()
 
 storage = a.storage.create(
-    storage_configuration_name=f'sdk-{time.time_ns()}',
-    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
+)
 
 # cleanup
 a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
diff --git a/examples/account/storage/get_storage.py b/examples/account/storage/get_storage.py
index 47c521b76..2ef0bb422 100755
--- a/examples/account/storage/get_storage.py
+++ b/examples/account/storage/get_storage.py
@@ -5,7 +5,9 @@
 
 a = AccountClient()
 
-storage = a.storage.create(storage_configuration_name=f'sdk-{time.time_ns()}',
-                           root_bucket_info=provisioning.RootBucketInfo(bucket_name=f'sdk-{time.time_ns()}'))
+storage = a.storage.create(
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
+)
 
 by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id)
diff --git a/examples/account/users/create_account_users.py b/examples/account/users/create_account_users.py
index 064b6f0c8..8022f4ed5 100755
--- a/examples/account/users/create_account_users.py
+++ b/examples/account/users/create_account_users.py
@@ -4,7 +4,10 @@
 
 a = AccountClient()
 
-user = a.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = a.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 # cleanup
 a.users.delete(id=user.id)
diff --git a/examples/account/users/delete_account_users.py b/examples/account/users/delete_account_users.py
index 16eb8d757..3480f5bf7 100755
--- a/examples/account/users/delete_account_users.py
+++ b/examples/account/users/delete_account_users.py
@@ -4,6 +4,9 @@
 
 a = AccountClient()
 
-user = a.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = a.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 a.users.delete(id=user.id)
diff --git a/examples/account/users/get_account_users.py b/examples/account/users/get_account_users.py
index 58f2b7b4b..7b86441e0 100755
--- a/examples/account/users/get_account_users.py
+++ b/examples/account/users/get_account_users.py
@@ -4,7 +4,10 @@
 
 a = AccountClient()
 
-user = a.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = a.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 by_id = a.users.get(id=user.id)
 
diff --git a/examples/account/users/patch_account_users.py b/examples/account/users/patch_account_users.py
index b68c0a9e5..1f97104ec 100755
--- a/examples/account/users/patch_account_users.py
+++ b/examples/account/users/patch_account_users.py
@@ -5,14 +5,21 @@
 
 a = AccountClient()
 
-user = a.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = a.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
-a.users.patch(id=user.id,
-              schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
-              operations=[
-                  iam.Patch(op=iam.PatchOp.ADD,
-                            value=iam.User(roles=[iam.ComplexValue(value="account_admin")]))
-              ])
+a.users.patch(
+    id=user.id,
+    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
+    operations=[
+        iam.Patch(
+            op=iam.PatchOp.ADD,
+            value=iam.User(roles=[iam.ComplexValue(value="account_admin")]),
+        )
+    ],
+)
 
 # cleanup
 a.users.delete(id=user.id)
diff --git a/examples/account/vpc_endpoints/create_vpc_endpoints.py b/examples/account/vpc_endpoints/create_vpc_endpoints.py
index 750331890..3fc349de2 100755
--- a/examples/account/vpc_endpoints/create_vpc_endpoints.py
+++ b/examples/account/vpc_endpoints/create_vpc_endpoints.py
@@ -5,9 +5,11 @@
 
 a = AccountClient()
 
-created = a.vpc_endpoints.create(aws_vpc_endpoint_id=os.environ["TEST_RELAY_VPC_ENDPOINT"],
-                                 region=os.environ["AWS_REGION"],
-                                 vpc_endpoint_name=f'sdk-{time.time_ns()}')
+created = a.vpc_endpoints.create(
+    aws_vpc_endpoint_id=os.environ["TEST_RELAY_VPC_ENDPOINT"],
+    region=os.environ["AWS_REGION"],
+    vpc_endpoint_name=f"sdk-{time.time_ns()}",
+)
 
 # cleanup
 a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)
diff --git a/examples/account/vpc_endpoints/get_vpc_endpoints.py b/examples/account/vpc_endpoints/get_vpc_endpoints.py
index d49d32683..848514175 100755
--- a/examples/account/vpc_endpoints/get_vpc_endpoints.py
+++ b/examples/account/vpc_endpoints/get_vpc_endpoints.py
@@ -5,9 +5,11 @@
 
 a = AccountClient()
 
-created = a.vpc_endpoints.create(aws_vpc_endpoint_id=os.environ["TEST_RELAY_VPC_ENDPOINT"],
-                                 region=os.environ["AWS_REGION"],
-                                 vpc_endpoint_name=f'sdk-{time.time_ns()}')
+created = a.vpc_endpoints.create(
+    aws_vpc_endpoint_id=os.environ["TEST_RELAY_VPC_ENDPOINT"],
+    region=os.environ["AWS_REGION"],
+    vpc_endpoint_name=f"sdk-{time.time_ns()}",
+)
 
 by_id = a.vpc_endpoints.get(vpc_endpoint_id=created.vpc_endpoint_id)
 
diff --git a/examples/account/workspace_assignment/update_workspace_assignment.py b/examples/account/workspace_assignment/update_workspace_assignment.py
index c58164b48..b35e8f52c 100755
--- a/examples/account/workspace_assignment/update_workspace_assignment.py
+++ b/examples/account/workspace_assignment/update_workspace_assignment.py
@@ -6,12 +6,14 @@
 
 a = AccountClient()
 
-spn = a.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
 spn_id = spn.id
 
 workspace_id = os.environ["TEST_WORKSPACE_ID"]
 
-a.workspace_assignment.update(workspace_id=workspace_id,
-                              principal_id=spn_id,
-                              permissions=[iam.WorkspacePermission.USER])
+a.workspace_assignment.update(
+    workspace_id=workspace_id,
+    principal_id=spn_id,
+    permissions=[iam.WorkspacePermission.USER],
+)
diff --git a/examples/account/workspace_assignment/update_workspace_assignment_on_aws.py b/examples/account/workspace_assignment/update_workspace_assignment_on_aws.py
index f12e85891..8bc5b45b2 100755
--- a/examples/account/workspace_assignment/update_workspace_assignment_on_aws.py
+++ b/examples/account/workspace_assignment/update_workspace_assignment_on_aws.py
@@ -6,12 +6,14 @@
 
 a = AccountClient()
 
-spn = a.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
 spn_id = spn.id
 
 workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-_ = a.workspace_assignment.update(workspace_id=workspace_id,
-                                  principal_id=spn_id,
-                                  permissions=[iam.WorkspacePermission.USER])
+_ = a.workspace_assignment.update(
+    workspace_id=workspace_id,
+    principal_id=spn_id,
+    permissions=[iam.WorkspacePermission.USER],
+)
diff --git a/examples/account/workspaces/create_workspaces.py b/examples/account/workspaces/create_workspaces.py
index c2ff96ef2..d0ee50f04 100755
--- a/examples/account/workspaces/create_workspaces.py
+++ b/examples/account/workspaces/create_workspaces.py
@@ -7,18 +7,23 @@
 a = AccountClient()
 
 storage = a.storage.create(
-    storage_configuration_name=f'sdk-{time.time_ns()}',
-    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
+    storage_configuration_name=f"sdk-{time.time_ns()}",
+    root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]),
+)
 
 role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+    ),
+)
 
-waiter = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                             aws_region=os.environ["AWS_REGION"],
-                             credentials_id=role.credentials_id,
-                             storage_configuration_id=storage.storage_configuration_id)
+waiter = a.workspaces.create(
+    workspace_name=f"sdk-{time.time_ns()}",
+    aws_region=os.environ["AWS_REGION"],
+    credentials_id=role.credentials_id,
+    storage_configuration_id=storage.storage_configuration_id,
+)
 
 # cleanup
 a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
diff --git a/examples/account/workspaces/update_workspaces.py b/examples/account/workspaces/update_workspaces.py
index e93450722..23053350a 100755
--- a/examples/account/workspaces/update_workspaces.py
+++ b/examples/account/workspaces/update_workspaces.py
@@ -7,13 +7,18 @@
 a = AccountClient()
 
 update_role = a.credentials.create(
-    credentials_name=f'sdk-{time.time_ns()}',
-    aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-        role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+    credentials_name=f"sdk-{time.time_ns()}",
+    aws_credentials=provisioning.CreateCredentialAwsCredentials(
+        sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+    ),
+)
 
 created = a.waiter.get()
 
-_ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result()
+_ = a.workspaces.update(
+    workspace_id=created.workspace_id,
+    credentials_id=update_role.credentials_id,
+).result()
 
 # cleanup
 a.credentials.delete(credentials_id=update_role.credentials_id)
diff --git a/examples/external_browser_auth.py b/examples/external_browser_auth.py
index 061ff60c7..f82b64115 100644
--- a/examples/external_browser_auth.py
+++ b/examples/external_browser_auth.py
@@ -1,7 +1,8 @@
-from databricks.sdk import WorkspaceClient
 import argparse
 import logging
 
+from databricks.sdk import WorkspaceClient
+
 logging.basicConfig(level=logging.DEBUG)
 
 
@@ -21,9 +22,11 @@ def register_custom_app(confidential: bool) -> tuple[str, str]:
         confidential=confidential,
         scopes=["all-apis"],
     )
-    logging.info(f"Created new custom app: "
-                 f"--client_id {custom_app.client_id} "
-                 f"{'--client_secret ' + custom_app.client_secret if confidential else ''}")
+    logging.info(
+        f"Created new custom app: "
+        f"--client_id {custom_app.client_id} "
+        f"{'--client_secret ' + custom_app.client_secret if confidential else ''}"
+    )
 
     return custom_app.client_id, custom_app.client_secret
 
@@ -32,6 +35,7 @@ def delete_custom_app(client_id: str):
     """Creates new Custom OAuth App in Databricks Account"""
     logging.info(f"Deleting custom app {client_id}")
     from databricks.sdk import AccountClient
+
     account_client = AccountClient()
     account_client.custom_app_integration.delete(client_id)
 
@@ -42,9 +46,21 @@ def delete_custom_app(client_id: str):
     parser.add_argument("--client_id", help="Databricks client_id", default=None)
     parser.add_argument("--azure_client_id", help="Databricks azure_client_id", default=None)
     parser.add_argument("--client_secret", help="Databricks client_secret", default=None)
-    parser.add_argument("--azure_client_secret", help="Databricks azure_client_secret", default=None)
-    parser.add_argument("--register-custom-app", action="store_true", help="Register a new custom app")
-    parser.add_argument("--register-custom-app-confidential", action="store_true", help="Register a new custom app")
+    parser.add_argument(
+        "--azure_client_secret",
+        help="Databricks azure_client_secret",
+        default=None,
+    )
+    parser.add_argument(
+        "--register-custom-app",
+        action="store_true",
+        help="Register a new custom app",
+    )
+    parser.add_argument(
+        "--register-custom-app-confidential",
+        action="store_true",
+        help="Register a new custom app",
+    )
     namespace = parser.parse_args()
     if namespace.register_custom_app and (namespace.client_id is not None or namespace.azure_client_id is not None):
         raise ValueError("Cannot register custom app and provide --client_id/--azure_client_id at the same time")
@@ -68,5 +84,3 @@ def delete_custom_app(client_id: str):
 
     if namespace.register_custom_app:
         delete_custom_app(client_id)
-
-
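
For context, a hedged sketch of how the parsed flags above are typically consumed; the workspace URL and the final cluster/user lookup are assumptions, not shown in this hunk. With auth_type="external-browser", the client opens a browser window for the OAuth U2M flow against the chosen client_id.

# Hypothetical continuation of the example above.
w = WorkspaceClient(
    host="https://my-workspace.cloud.databricks.com",  # assumption: your workspace URL
    client_id=client_id,
    client_secret=client_secret,
    auth_type="external-browser",
)
logging.info(f"Authenticated as {w.current_user.me().user_name}")
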
diff --git a/examples/flask_app_with_oauth.py b/examples/flask_app_with_oauth.py
index 7c18eadc7..5fed4167b 100755
--- a/examples/flask_app_with_oauth.py
+++ b/examples/flask_app_with_oauth.py
@@ -72,15 +72,18 @@ def callback():
     @app.route("/")
     def index():
         """The index page checks if the user has already authenticated and retrieves the user's credentials using
-        the Databricks SDK WorkspaceClient. It then renders the template with the clusters' list."""
+        the Databricks SDK WorkspaceClient. It then renders the template with the clusters' list.
+        """
         oidc_endpoints = get_workspace_endpoints(workspace_host)
         port = request.environ.get("SERVER_PORT")
-        redirect_url=f"http://localhost:{port}/callback"
+        redirect_url = f"http://localhost:{port}/callback"
         if "creds" not in session:
-            oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                       client_id=client_id,
-                                       client_secret=client_secret,
-                                       redirect_url=redirect_url)
+            oauth_client = OAuthClient(
+                oidc_endpoints=oidc_endpoints,
+                client_id=client_id,
+                client_secret=client_secret,
+                redirect_url=redirect_url,
+            )
             consent = oauth_client.initiate_consent()
             session["consent"] = consent.as_dict()
             return redirect(consent.authorization_url)
@@ -88,19 +91,26 @@ def index():
         from databricks.sdk import WorkspaceClient
         from databricks.sdk.oauth import SessionCredentials
 
-        credentials_strategy = SessionCredentials.from_dict(session["creds"],
-                                                            token_endpoint=oidc_endpoints.token_endpoint,
-                                                            client_id=client_id,
-                                                            client_secret=client_secret,
-                                                            redirect_url=redirect_url)
-        workspace_client = WorkspaceClient(host=workspace_host,
-                                           product=APP_NAME,
-                                           credentials_strategy=credentials_strategy,
-                                           )
+        credentials_strategy = SessionCredentials.from_dict(
+            session["creds"],
+            token_endpoint=oidc_endpoints.token_endpoint,
+            client_id=client_id,
+            client_secret=client_secret,
+            redirect_url=redirect_url,
+        )
+        workspace_client = WorkspaceClient(
+            host=workspace_host,
+            product=APP_NAME,
+            credentials_strategy=credentials_strategy,
+        )
         clusters = workspace_client.clusters.list(
             filter_by=ListClustersFilterBy(cluster_states=[State.RUNNING, State.PENDING])
         )
-        return render_template_string(all_clusters_template, workspace_host=workspace_host, clusters=clusters)
+        return render_template_string(
+            all_clusters_template,
+            workspace_host=workspace_host,
+            clusters=clusters,
+        )
 
     return app
 
@@ -121,9 +131,9 @@ def register_custom_app(args: argparse.Namespace) -> tuple[str, str]:
         confidential=True,
         scopes=["all-apis"],
     )
-    logging.info(f"Created new custom app: "
-                 f"--client_id {custom_app.client_id} "
-                 f"--client_secret {custom_app.client_secret}")
+    logging.info(
+        f"Created new custom app: " f"--client_id {custom_app.client_id} " f"--client_secret {custom_app.client_secret}"
+    )
 
     return custom_app.client_id, custom_app.client_secret
 
@@ -135,15 +145,20 @@ def parse_arguments() -> argparse.Namespace:
     for flag in ["client_id", "client_secret"]:
         parser.add_argument(f"--{flag}")
     parser.add_argument("--port", default=5001, type=int)
-    parser.add_argument("--profile", default="DEFAULT", help="Databricks account profile to use for authentication.")
+    parser.add_argument(
+        "--profile",
+        default="DEFAULT",
+        help="Databricks account profile to use for authentication.",
+    )
     return parser.parse_args()
 
 
 if __name__ == "__main__":
-    logging.basicConfig(stream=sys.stdout,
-                        level=logging.INFO,
-                        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
-                        )
+    logging.basicConfig(
+        stream=sys.stdout,
+        level=logging.INFO,
+        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
+    )
     logging.getLogger("databricks.sdk").setLevel(logging.DEBUG)
 
     args = parse_arguments()
diff --git a/examples/last_job_runs.py b/examples/last_job_runs.py
index 0a9639574..2868ed0e6 100755
--- a/examples/last_job_runs.py
+++ b/examples/last_job_runs.py
@@ -7,10 +7,11 @@
 from databricks.sdk import WorkspaceClient
 
 if __name__ == "__main__":
-    logging.basicConfig(stream=sys.stdout,
-                        level=logging.INFO,
-                        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
-                        )
+    logging.basicConfig(
+        stream=sys.stdout,
+        level=logging.INFO,
+        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
+    )
 
     latest_state = {}
     all_jobs = {}
@@ -30,12 +31,14 @@
 
     summary = []
     for job_id, run in latest_state.items():
-        summary.append({
-            "job_name": all_jobs[job_id].settings.name,
-            "last_status": run.state.result_state,
-            "last_finished": datetime.fromtimestamp(run.end_time / 1000, timezone.utc),
-            "average_duration": sum(durations[job_id]) / len(durations[job_id]),
-        })
+        summary.append(
+            {
+                "job_name": all_jobs[job_id].settings.name,
+                "last_status": run.state.result_state,
+                "last_finished": datetime.fromtimestamp(run.end_time / 1000, timezone.utc),
+                "average_duration": sum(durations[job_id]) / len(durations[job_id]),
+            }
+        )
 
     for line in sorted(summary, key=lambda s: s["last_finished"], reverse=True):
         logging.info(f"Latest: {line}")
diff --git a/examples/list_compute_submitrun_runs.py b/examples/list_compute_submitrun_runs.py
index 61c591b32..21602e1a1 100644
--- a/examples/list_compute_submitrun_runs.py
+++ b/examples/list_compute_submitrun_runs.py
@@ -6,10 +6,11 @@
 from databricks.sdk.service import jobs
 
 if __name__ == "__main__":
-    logging.basicConfig(stream=sys.stdout,
-                        level=logging.INFO,
-                        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
-                        )
+    logging.basicConfig(
+        stream=sys.stdout,
+        level=logging.INFO,
+        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
+    )
 
     w = WorkspaceClient()
 
@@ -24,19 +25,22 @@
             compute_used = []
             # Iterate over tasks in the run
             for task in run.tasks:
-                '''
+                """
                 - Tasks with All Purpose clusters will have an existing_cluster_id
                 - Tasks with a Jobs cluster will have the new_cluster represented as ClusterSpec
                 - SQL tasks will have a sql_warehouse_id
-                '''
+                """
                 task_compute = (
-                    {"existing_cluster_id": task.existing_cluster_id} if task.existing_cluster_id else
-                    {"new_cluster": task.new_cluster} if task.new_cluster else
-                    {"sql_warehouse_id": task.sql_task.warehouse_id} if task.sql_task else
-                    {}
+                    {"existing_cluster_id": task.existing_cluster_id}
+                    if task.existing_cluster_id
+                    else (
+                        {"new_cluster": task.new_cluster}
+                        if task.new_cluster
+                        else ({"sql_warehouse_id": task.sql_task.warehouse_id} if task.sql_task else {})
+                    )
                 )
 
                 # Append the task compute info to a list for the job
                 compute_used.append(task_compute)
-            
+
             logging.info(f"run_id: {run.run_id}, compute_used: {compute_used}")
diff --git a/examples/starting_job_and_waiting.py b/examples/starting_job_and_waiting.py
index 0e150966d..da3b0a1eb 100755
--- a/examples/starting_job_and_waiting.py
+++ b/examples/starting_job_and_waiting.py
@@ -1,5 +1,5 @@
 #!env python3
-""" Detailed demonstration of long-running operations
+"""Detailed demonstration of long-running operations
 
 This example goes over the advanced usage of long-running operations like:
 
@@ -39,14 +39,15 @@
 import sys
 import time
 
-from databricks.sdk.service import compute, jobs
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import compute, jobs
 
 if __name__ == "__main__":
-    logging.basicConfig(stream=sys.stdout,
-                        level=logging.INFO,
-                        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
-                        )
+    logging.basicConfig(
+        stream=sys.stdout,
+        level=logging.INFO,
+        format="%(asctime)s [%(name)s][%(levelname)s] %(message)s",
+    )
 
     w = WorkspaceClient()
 
@@ -56,19 +57,20 @@
         f.write(b'import time; time.sleep(10); print("Hello, World!")')
 
     # trigger one-time-run job and get waiter object
-    waiter = w.jobs.submit(run_name=f"py-sdk-run-{time.time()}",
-                           tasks=[
-                               jobs.SubmitTask(
-                                   task_key="hello_world",
-                                   new_cluster=compute.ClusterSpec(
-                                       spark_version=w.clusters.select_spark_version(long_term_support=True),
-                                       node_type_id=w.clusters.select_node_type(local_disk=True),
-                                       num_workers=1,
-                                   ),
-                                   spark_python_task=jobs.SparkPythonTask(python_file=f"dbfs:{py_on_dbfs}"),
-                               )
-                           ],
-                           )
+    waiter = w.jobs.submit(
+        run_name=f"py-sdk-run-{time.time()}",
+        tasks=[
+            jobs.SubmitTask(
+                task_key="hello_world",
+                new_cluster=compute.ClusterSpec(
+                    spark_version=w.clusters.select_spark_version(long_term_support=True),
+                    node_type_id=w.clusters.select_node_type(local_disk=True),
+                    num_workers=1,
+                ),
+                spark_python_task=jobs.SparkPythonTask(python_file=f"dbfs:{py_on_dbfs}"),
+            )
+        ],
+    )
 
     logging.info(f"starting to poll: {waiter.run_id}")
 
diff --git a/examples/workspace/alerts/create_alerts.py b/examples/workspace/alerts/create_alerts.py
index bae1ecf45..89e076f17 100755
--- a/examples/workspace/alerts/create_alerts.py
+++ b/examples/workspace/alerts/create_alerts.py
@@ -7,20 +7,26 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SELECT 1"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SELECT 1",
+    )
+)
 
 alert = w.alerts.create(
-    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
-        column=sql.AlertOperandColumn(name="1")),
-                                                                   op=sql.AlertOperator.EQUAL,
-                                                                   threshold=sql.AlertConditionThreshold(
-                                                                       value=sql.AlertOperandValue(
-                                                                           double_value=1))),
-                                      display_name=f'sdk-{time.time_ns()}',
-                                      query_id=query.id))
+    alert=sql.CreateAlertRequestAlert(
+        condition=sql.AlertCondition(
+            operand=sql.AlertConditionOperand(column=sql.AlertOperandColumn(name="1")),
+            op=sql.AlertOperator.EQUAL,
+            threshold=sql.AlertConditionThreshold(value=sql.AlertOperandValue(double_value=1)),
+        ),
+        display_name=f"sdk-{time.time_ns()}",
+        query_id=query.id,
+    )
+)
 
 # cleanup
 w.queries.delete(id=query.id)
diff --git a/examples/workspace/alerts/get_alerts.py b/examples/workspace/alerts/get_alerts.py
index a1a861b14..34e88883b 100755
--- a/examples/workspace/alerts/get_alerts.py
+++ b/examples/workspace/alerts/get_alerts.py
@@ -7,20 +7,26 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SELECT 1"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SELECT 1",
+    )
+)
 
 alert = w.alerts.create(
-    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
-        column=sql.AlertOperandColumn(name="1")),
-                                                                   op=sql.AlertOperator.EQUAL,
-                                                                   threshold=sql.AlertConditionThreshold(
-                                                                       value=sql.AlertOperandValue(
-                                                                           double_value=1))),
-                                      display_name=f'sdk-{time.time_ns()}',
-                                      query_id=query.id))
+    alert=sql.CreateAlertRequestAlert(
+        condition=sql.AlertCondition(
+            operand=sql.AlertConditionOperand(column=sql.AlertOperandColumn(name="1")),
+            op=sql.AlertOperator.EQUAL,
+            threshold=sql.AlertConditionThreshold(value=sql.AlertOperandValue(double_value=1)),
+        ),
+        display_name=f"sdk-{time.time_ns()}",
+        query_id=query.id,
+    )
+)
 
 by_id = w.alerts.get(id=alert.id)
 
diff --git a/examples/workspace/alerts/update_alerts.py b/examples/workspace/alerts/update_alerts.py
index 5d1827f9b..93dcfd670 100755
--- a/examples/workspace/alerts/update_alerts.py
+++ b/examples/workspace/alerts/update_alerts.py
@@ -7,24 +7,32 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SELECT 1"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SELECT 1",
+    )
+)
 
 alert = w.alerts.create(
-    alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
-        column=sql.AlertOperandColumn(name="1")),
-                                                                   op=sql.AlertOperator.EQUAL,
-                                                                   threshold=sql.AlertConditionThreshold(
-                                                                       value=sql.AlertOperandValue(
-                                                                           double_value=1))),
-                                      display_name=f'sdk-{time.time_ns()}',
-                                      query_id=query.id))
+    alert=sql.CreateAlertRequestAlert(
+        condition=sql.AlertCondition(
+            operand=sql.AlertConditionOperand(column=sql.AlertOperandColumn(name="1")),
+            op=sql.AlertOperator.EQUAL,
+            threshold=sql.AlertConditionThreshold(value=sql.AlertOperandValue(double_value=1)),
+        ),
+        display_name=f"sdk-{time.time_ns()}",
+        query_id=query.id,
+    )
+)
 
-_ = w.alerts.update(id=alert.id,
-                    alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'),
-                    update_mask="display_name")
+_ = w.alerts.update(
+    id=alert.id,
+    alert=sql.UpdateAlertRequestAlert(display_name=f"sdk-{time.time_ns()}"),
+    update_mask="display_name",
+)
 
 # cleanup
 w.queries.delete(id=query.id)
diff --git a/examples/workspace/catalogs/create_catalog_workspace_bindings.py b/examples/workspace/catalogs/create_catalog_workspace_bindings.py
index 7d746c0bb..c2980c2fb 100755
--- a/examples/workspace/catalogs/create_catalog_workspace_bindings.py
+++ b/examples/workspace/catalogs/create_catalog_workspace_bindings.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=created.name, force=True)
diff --git a/examples/workspace/catalogs/create_catalogs.py b/examples/workspace/catalogs/create_catalogs.py
index 7d746c0bb..c2980c2fb 100755
--- a/examples/workspace/catalogs/create_catalogs.py
+++ b/examples/workspace/catalogs/create_catalogs.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=created.name, force=True)
diff --git a/examples/workspace/catalogs/create_schemas.py b/examples/workspace/catalogs/create_schemas.py
index f0d29dea8..28bbbaf33 100755
--- a/examples/workspace/catalogs/create_schemas.py
+++ b/examples/workspace/catalogs/create_schemas.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-new_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=new_catalog.name, force=True)
diff --git a/examples/workspace/catalogs/create_shares.py b/examples/workspace/catalogs/create_shares.py
index 85ef22704..2fbd07bc1 100755
--- a/examples/workspace/catalogs/create_shares.py
+++ b/examples/workspace/catalogs/create_shares.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/catalogs/create_tables.py b/examples/workspace/catalogs/create_tables.py
index 85ef22704..2fbd07bc1 100755
--- a/examples/workspace/catalogs/create_tables.py
+++ b/examples/workspace/catalogs/create_tables.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/catalogs/create_volumes.py b/examples/workspace/catalogs/create_volumes.py
index 85ef22704..2fbd07bc1 100755
--- a/examples/workspace/catalogs/create_volumes.py
+++ b/examples/workspace/catalogs/create_volumes.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/catalogs/get_catalogs.py b/examples/workspace/catalogs/get_catalogs.py
index af9c4f660..2cdfc30de 100755
--- a/examples/workspace/catalogs/get_catalogs.py
+++ b/examples/workspace/catalogs/get_catalogs.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.catalogs.get(name=created.name)
 
diff --git a/examples/workspace/catalogs/update_catalog_workspace_bindings.py b/examples/workspace/catalogs/update_catalog_workspace_bindings.py
index 09a97dee8..9acd91e4f 100755
--- a/examples/workspace/catalogs/update_catalog_workspace_bindings.py
+++ b/examples/workspace/catalogs/update_catalog_workspace_bindings.py
@@ -5,7 +5,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
 
diff --git a/examples/workspace/catalogs/update_catalogs.py b/examples/workspace/catalogs/update_catalogs.py
index 6f8e257cd..ef801c350 100755
--- a/examples/workspace/catalogs/update_catalogs.py
+++ b/examples/workspace/catalogs/update_catalogs.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.catalogs.update(name=created.name, comment="updated")
 
diff --git a/examples/workspace/cluster_policies/create_cluster_policies.py b/examples/workspace/cluster_policies/create_cluster_policies.py
index 53d6ce96f..823462dcb 100755
--- a/examples/workspace/cluster_policies/create_cluster_policies.py
+++ b/examples/workspace/cluster_policies/create_cluster_policies.py
@@ -4,14 +4,16 @@
 
 w = WorkspaceClient()
 
-created = w.cluster_policies.create(name=f'sdk-{time.time_ns()}',
-                                    definition="""{
+created = w.cluster_policies.create(
+    name=f"sdk-{time.time_ns()}",
+    definition="""{
             "spark_conf.spark.databricks.delta.preview.enabled": {
                 "type": "fixed",
                 "value": true
             }
         }
-""")
+""",
+)
 
 # cleanup
 w.cluster_policies.delete(policy_id=created.policy_id)
diff --git a/examples/workspace/cluster_policies/edit_cluster_policies.py b/examples/workspace/cluster_policies/edit_cluster_policies.py
index 5c2777e17..d6bdeb7c4 100755
--- a/examples/workspace/cluster_policies/edit_cluster_policies.py
+++ b/examples/workspace/cluster_policies/edit_cluster_policies.py
@@ -4,26 +4,30 @@
 
 w = WorkspaceClient()
 
-created = w.cluster_policies.create(name=f'sdk-{time.time_ns()}',
-                                    definition="""{
+created = w.cluster_policies.create(
+    name=f"sdk-{time.time_ns()}",
+    definition="""{
             "spark_conf.spark.databricks.delta.preview.enabled": {
                 "type": "fixed",
                 "value": true
             }
         }
-""")
+""",
+)
 
 policy = w.cluster_policies.get(policy_id=created.policy_id)
 
-w.cluster_policies.edit(policy_id=policy.policy_id,
-                        name=policy.name,
-                        definition="""{
+w.cluster_policies.edit(
+    policy_id=policy.policy_id,
+    name=policy.name,
+    definition="""{
             "spark_conf.spark.databricks.delta.preview.enabled": {
                 "type": "fixed",
                 "value": false
             }
         }
-""")
+""",
+)
 
 # cleanup
 w.cluster_policies.delete(policy_id=created.policy_id)
diff --git a/examples/workspace/cluster_policies/get_cluster_policies.py b/examples/workspace/cluster_policies/get_cluster_policies.py
index d93196bef..ea3edae82 100755
--- a/examples/workspace/cluster_policies/get_cluster_policies.py
+++ b/examples/workspace/cluster_policies/get_cluster_policies.py
@@ -4,14 +4,16 @@
 
 w = WorkspaceClient()
 
-created = w.cluster_policies.create(name=f'sdk-{time.time_ns()}',
-                                    definition="""{
+created = w.cluster_policies.create(
+    name=f"sdk-{time.time_ns()}",
+    definition="""{
             "spark_conf.spark.databricks.delta.preview.enabled": {
                 "type": "fixed",
                 "value": true
             }
         }
-""")
+""",
+)
 
 policy = w.cluster_policies.get(policy_id=created.policy_id)
 
diff --git a/examples/workspace/clusters/change_owner_clusters_api_integration.py b/examples/workspace/clusters/change_owner_clusters_api_integration.py
index 6fd9a14c7..bdd37aaa2 100755
--- a/examples/workspace/clusters/change_owner_clusters_api_integration.py
+++ b/examples/workspace/clusters/change_owner_clusters_api_integration.py
@@ -7,15 +7,17 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com')
+other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com")
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 w.clusters.change_owner(cluster_id=clstr.cluster_id, owner_username=other_owner.user_name)
 
diff --git a/examples/workspace/clusters/create_clusters_api_integration.py b/examples/workspace/clusters/create_clusters_api_integration.py
index 48b31308c..14a5349c6 100755
--- a/examples/workspace/clusters/create_clusters_api_integration.py
+++ b/examples/workspace/clusters/create_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 # cleanup
 w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
diff --git a/examples/workspace/clusters/delete_clusters_api_integration.py b/examples/workspace/clusters/delete_clusters_api_integration.py
index 5f096893b..038ea0b36 100755
--- a/examples/workspace/clusters/delete_clusters_api_integration.py
+++ b/examples/workspace/clusters/delete_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 _ = w.clusters.delete(cluster_id=clstr.cluster_id).result()
 
diff --git a/examples/workspace/clusters/edit_clusters_api_integration.py b/examples/workspace/clusters/edit_clusters_api_integration.py
index 625eae461..e434ebde2 100755
--- a/examples/workspace/clusters/edit_clusters_api_integration.py
+++ b/examples/workspace/clusters/edit_clusters_api_integration.py
@@ -5,22 +5,26 @@
 
 w = WorkspaceClient()
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
-_ = w.clusters.edit(cluster_id=clstr.cluster_id,
-                    spark_version=latest,
-                    cluster_name=cluster_name,
-                    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                    autotermination_minutes=10,
-                    num_workers=2).result()
+_ = w.clusters.edit(
+    cluster_id=clstr.cluster_id,
+    spark_version=latest,
+    cluster_name=cluster_name,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=10,
+    num_workers=2,
+).result()
 
 # cleanup
 w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
diff --git a/examples/workspace/clusters/events_clusters_api_integration.py b/examples/workspace/clusters/events_clusters_api_integration.py
index 3701a1737..a869bb64c 100755
--- a/examples/workspace/clusters/events_clusters_api_integration.py
+++ b/examples/workspace/clusters/events_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 events = w.clusters.events(cluster_id=clstr.cluster_id)
 
diff --git a/examples/workspace/clusters/get_clusters_api_integration.py b/examples/workspace/clusters/get_clusters_api_integration.py
index 718977dfb..77fa56c38 100755
--- a/examples/workspace/clusters/get_clusters_api_integration.py
+++ b/examples/workspace/clusters/get_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 by_id = w.clusters.get(cluster_id=clstr.cluster_id)
 
diff --git a/examples/workspace/clusters/pin_clusters_api_integration.py b/examples/workspace/clusters/pin_clusters_api_integration.py
index 31f5610b6..1911012a5 100755
--- a/examples/workspace/clusters/pin_clusters_api_integration.py
+++ b/examples/workspace/clusters/pin_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 w.clusters.pin(cluster_id=clstr.cluster_id)
 
diff --git a/examples/workspace/clusters/resize_clusters_api_integration.py b/examples/workspace/clusters/resize_clusters_api_integration.py
index 9711185ee..8f2336041 100755
--- a/examples/workspace/clusters/resize_clusters_api_integration.py
+++ b/examples/workspace/clusters/resize_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 by_id = w.clusters.resize(cluster_id=clstr.cluster_id, num_workers=1).result()
 
diff --git a/examples/workspace/clusters/restart_clusters_api_integration.py b/examples/workspace/clusters/restart_clusters_api_integration.py
index b78b57b52..c25a560b3 100755
--- a/examples/workspace/clusters/restart_clusters_api_integration.py
+++ b/examples/workspace/clusters/restart_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 _ = w.clusters.restart(cluster_id=clstr.cluster_id).result()
 
diff --git a/examples/workspace/clusters/start_clusters_api_integration.py b/examples/workspace/clusters/start_clusters_api_integration.py
index 68594ba16..96effabdb 100755
--- a/examples/workspace/clusters/start_clusters_api_integration.py
+++ b/examples/workspace/clusters/start_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 _ = w.clusters.start(cluster_id=clstr.cluster_id).result()
 
diff --git a/examples/workspace/clusters/unpin_clusters_api_integration.py b/examples/workspace/clusters/unpin_clusters_api_integration.py
index c4c5df5d7..3744aff87 100755
--- a/examples/workspace/clusters/unpin_clusters_api_integration.py
+++ b/examples/workspace/clusters/unpin_clusters_api_integration.py
@@ -7,13 +7,15 @@
 
 latest = w.clusters.select_spark_version(latest=True, long_term_support=True)
 
-cluster_name = f'sdk-{time.time_ns()}'
+cluster_name = f"sdk-{time.time_ns()}"
 
-clstr = w.clusters.create(cluster_name=cluster_name,
-                          spark_version=latest,
-                          instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                          autotermination_minutes=15,
-                          num_workers=1).result()
+clstr = w.clusters.create(
+    cluster_name=cluster_name,
+    spark_version=latest,
+    instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+    autotermination_minutes=15,
+    num_workers=1,
+).result()
 
 w.clusters.unpin(cluster_id=clstr.cluster_id)
 
diff --git a/examples/workspace/command_execution/execute_commands_direct_usage.py b/examples/workspace/command_execution/execute_commands_direct_usage.py
index 98fa13a19..1bf5dd251 100755
--- a/examples/workspace/command_execution/execute_commands_direct_usage.py
+++ b/examples/workspace/command_execution/execute_commands_direct_usage.py
@@ -9,10 +9,12 @@
 
 context = w.command_execution.create(cluster_id=cluster_id, language=compute.Language.PYTHON).result()
 
-text_results = w.command_execution.execute(cluster_id=cluster_id,
-                                           context_id=context.id,
-                                           language=compute.Language.PYTHON,
-                                           command="print(1)").result()
+text_results = w.command_execution.execute(
+    cluster_id=cluster_id,
+    context_id=context.id,
+    language=compute.Language.PYTHON,
+    command="print(1)",
+).result()
 
 # cleanup
 w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
diff --git a/examples/workspace/command_executor/execute_commands.py b/examples/workspace/command_executor/execute_commands.py
index a368ee732..9fb714d62 100755
--- a/examples/workspace/command_executor/execute_commands.py
+++ b/examples/workspace/command_executor/execute_commands.py
@@ -4,7 +4,8 @@
 
 w = WorkspaceClient()
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
 res = w.command_executor.execute(cluster_id, "python", "print(1)")
diff --git a/examples/workspace/connections/create_connections.py b/examples/workspace/connections/create_connections.py
index 46701b767..2ea78993f 100755
--- a/examples/workspace/connections/create_connections.py
+++ b/examples/workspace/connections/create_connections.py
@@ -5,17 +5,16 @@
 
 w = WorkspaceClient()
 
-conn_create = w.connections.create(comment="Go SDK Acceptance Test Connection",
-                                   connection_type=catalog.ConnectionType.DATABRICKS,
-                                   name=f'sdk-{time.time_ns()}',
-                                   options={
-                                       "host":
-                                       "%s-fake-workspace.cloud.databricks.com" % (f'sdk-{time.time_ns()}'),
-                                       "httpPath":
-                                       "/sql/1.0/warehouses/%s" % (f'sdk-{time.time_ns()}'),
-                                       "personalAccessToken":
-                                       f'sdk-{time.time_ns()}',
-                                   })
+conn_create = w.connections.create(
+    comment="Go SDK Acceptance Test Connection",
+    connection_type=catalog.ConnectionType.DATABRICKS,
+    name=f"sdk-{time.time_ns()}",
+    options={
+        "host": "%s-fake-workspace.cloud.databricks.com" % (f"sdk-{time.time_ns()}"),
+        "httpPath": "/sql/1.0/warehouses/%s" % (f"sdk-{time.time_ns()}"),
+        "personalAccessToken": f"sdk-{time.time_ns()}",
+    },
+)
 
 # cleanup
 w.connections.delete(name=conn_create.name)
diff --git a/examples/workspace/connections/get_connections.py b/examples/workspace/connections/get_connections.py
index 20d20c9ca..b610ee1ed 100755
--- a/examples/workspace/connections/get_connections.py
+++ b/examples/workspace/connections/get_connections.py
@@ -5,27 +5,25 @@
 
 w = WorkspaceClient()
 
-conn_create = w.connections.create(comment="Go SDK Acceptance Test Connection",
-                                   connection_type=catalog.ConnectionType.DATABRICKS,
-                                   name=f'sdk-{time.time_ns()}',
-                                   options={
-                                       "host":
-                                       "%s-fake-workspace.cloud.databricks.com" % (f'sdk-{time.time_ns()}'),
-                                       "httpPath":
-                                       "/sql/1.0/warehouses/%s" % (f'sdk-{time.time_ns()}'),
-                                       "personalAccessToken":
-                                       f'sdk-{time.time_ns()}',
-                                   })
+conn_create = w.connections.create(
+    comment="Go SDK Acceptance Test Connection",
+    connection_type=catalog.ConnectionType.DATABRICKS,
+    name=f"sdk-{time.time_ns()}",
+    options={
+        "host": "%s-fake-workspace.cloud.databricks.com" % (f"sdk-{time.time_ns()}"),
+        "httpPath": "/sql/1.0/warehouses/%s" % (f"sdk-{time.time_ns()}"),
+        "personalAccessToken": f"sdk-{time.time_ns()}",
+    },
+)
 
-conn_update = w.connections.update(name=conn_create.name,
-                                   options={
-                                       "host":
-                                       "%s-fake-workspace.cloud.databricks.com" % (f'sdk-{time.time_ns()}'),
-                                       "httpPath":
-                                       "/sql/1.0/warehouses/%s" % (f'sdk-{time.time_ns()}'),
-                                       "personalAccessToken":
-                                       f'sdk-{time.time_ns()}',
-                                   })
+conn_update = w.connections.update(
+    name=conn_create.name,
+    options={
+        "host": "%s-fake-workspace.cloud.databricks.com" % (f"sdk-{time.time_ns()}"),
+        "httpPath": "/sql/1.0/warehouses/%s" % (f"sdk-{time.time_ns()}"),
+        "personalAccessToken": f"sdk-{time.time_ns()}",
+    },
+)
 
 conn = w.connections.get(name=conn_update.name)
 
diff --git a/examples/workspace/connections/update_connections.py b/examples/workspace/connections/update_connections.py
index 49cfcdc36..bc575ff7e 100755
--- a/examples/workspace/connections/update_connections.py
+++ b/examples/workspace/connections/update_connections.py
@@ -5,27 +5,25 @@
 
 w = WorkspaceClient()
 
-conn_create = w.connections.create(comment="Go SDK Acceptance Test Connection",
-                                   connection_type=catalog.ConnectionType.DATABRICKS,
-                                   name=f'sdk-{time.time_ns()}',
-                                   options={
-                                       "host":
-                                       "%s-fake-workspace.cloud.databricks.com" % (f'sdk-{time.time_ns()}'),
-                                       "httpPath":
-                                       "/sql/1.0/warehouses/%s" % (f'sdk-{time.time_ns()}'),
-                                       "personalAccessToken":
-                                       f'sdk-{time.time_ns()}',
-                                   })
+conn_create = w.connections.create(
+    comment="Go SDK Acceptance Test Connection",
+    connection_type=catalog.ConnectionType.DATABRICKS,
+    name=f"sdk-{time.time_ns()}",
+    options={
+        "host": "%s-fake-workspace.cloud.databricks.com" % (f"sdk-{time.time_ns()}"),
+        "httpPath": "/sql/1.0/warehouses/%s" % (f"sdk-{time.time_ns()}"),
+        "personalAccessToken": f"sdk-{time.time_ns()}",
+    },
+)
 
-conn_update = w.connections.update(name=conn_create.name,
-                                   options={
-                                       "host":
-                                       "%s-fake-workspace.cloud.databricks.com" % (f'sdk-{time.time_ns()}'),
-                                       "httpPath":
-                                       "/sql/1.0/warehouses/%s" % (f'sdk-{time.time_ns()}'),
-                                       "personalAccessToken":
-                                       f'sdk-{time.time_ns()}',
-                                   })
+conn_update = w.connections.update(
+    name=conn_create.name,
+    options={
+        "host": "%s-fake-workspace.cloud.databricks.com" % (f"sdk-{time.time_ns()}"),
+        "httpPath": "/sql/1.0/warehouses/%s" % (f"sdk-{time.time_ns()}"),
+        "personalAccessToken": f"sdk-{time.time_ns()}",
+    },
+)
 
 # cleanup
 w.connections.delete(name=conn_create.name)
diff --git a/examples/workspace/dashboards/create_dashboards.py b/examples/workspace/dashboards/create_dashboards.py
index 9ac5670a0..fce5fbe13 100755
--- a/examples/workspace/dashboards/create_dashboards.py
+++ b/examples/workspace/dashboards/create_dashboards.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.dashboards.create(name=f'sdk-{time.time_ns()}')
+created = w.dashboards.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.dashboards.delete(dashboard_id=created.id)
diff --git a/examples/workspace/dashboards/delete_dashboards.py b/examples/workspace/dashboards/delete_dashboards.py
index 8c5806ec6..43addaf81 100755
--- a/examples/workspace/dashboards/delete_dashboards.py
+++ b/examples/workspace/dashboards/delete_dashboards.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.dashboards.create(name=f'sdk-{time.time_ns()}')
+created = w.dashboards.create(name=f"sdk-{time.time_ns()}")
 
 w.dashboards.delete(dashboard_id=created.id)
 
diff --git a/examples/workspace/dashboards/get_dashboards.py b/examples/workspace/dashboards/get_dashboards.py
index 3aa5ae55f..631e4bda3 100755
--- a/examples/workspace/dashboards/get_dashboards.py
+++ b/examples/workspace/dashboards/get_dashboards.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.dashboards.create(name=f'sdk-{time.time_ns()}')
+created = w.dashboards.create(name=f"sdk-{time.time_ns()}")
 
 by_id = w.dashboards.get(dashboard_id=created.id)
 
diff --git a/examples/workspace/dashboards/restore_dashboards.py b/examples/workspace/dashboards/restore_dashboards.py
index 7a34049d4..cb83bac29 100755
--- a/examples/workspace/dashboards/restore_dashboards.py
+++ b/examples/workspace/dashboards/restore_dashboards.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.dashboards.create(name=f'sdk-{time.time_ns()}')
+created = w.dashboards.create(name=f"sdk-{time.time_ns()}")
 
 w.dashboards.restore(dashboard_id=created.id)
 
diff --git a/examples/workspace/databricks/must_tokens.py b/examples/workspace/databricks/must_tokens.py
index 3c1c6c6d8..5f7c57daa 100755
--- a/examples/workspace/databricks/must_tokens.py
+++ b/examples/workspace/databricks/must_tokens.py
@@ -4,10 +4,11 @@
 
 w = WorkspaceClient()
 
-token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300)
+token = w.tokens.create(comment=f"sdk-{time.time_ns()}", lifetime_seconds=300)
 
 wsc_inner = w.databricks.must(
-    new_workspace_client(databricks.Config(host=w.config.host, token=token.token_value, auth_type="pat")))
+    new_workspace_client(databricks.Config(host=w.config.host, token=token.token_value, auth_type="pat"))
+)
 
 # cleanup
 w.tokens.delete(token_id=token.token_info.token_id)
diff --git a/examples/workspace/dbfs/download_file.py b/examples/workspace/dbfs/download_file.py
index 45abeaa61..8465955cf 100644
--- a/examples/workspace/dbfs/download_file.py
+++ b/examples/workspace/dbfs/download_file.py
@@ -6,10 +6,10 @@
 
 w = WorkspaceClient()
 
-root = pathlib.Path(f'/tmp/{time.time_ns()}')
+root = pathlib.Path(f"/tmp/{time.time_ns()}")
 
 f = io.BytesIO(b"some text data")
-w.dbfs.upload(f'{root}/01', f)
+w.dbfs.upload(f"{root}/01", f)
 
-with w.dbfs.download(f'{root}/01') as f:
-    assert f.read() == b"some text data"
\ No newline at end of file
+with w.dbfs.download(f"{root}/01") as f:
+    assert f.read() == b"some text data"
diff --git a/examples/workspace/experiments/create_experiment_experiments.py b/examples/workspace/experiments/create_experiment_experiments.py
index 99ca7488d..157fa5ed8 100755
--- a/examples/workspace/experiments/create_experiment_experiments.py
+++ b/examples/workspace/experiments/create_experiment_experiments.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
diff --git a/examples/workspace/experiments/create_experiment_m_lflow_runs.py b/examples/workspace/experiments/create_experiment_m_lflow_runs.py
index 99ca7488d..157fa5ed8 100755
--- a/examples/workspace/experiments/create_experiment_m_lflow_runs.py
+++ b/examples/workspace/experiments/create_experiment_m_lflow_runs.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
diff --git a/examples/workspace/experiments/create_run_m_lflow_runs.py b/examples/workspace/experiments/create_run_m_lflow_runs.py
index 2351d4b58..244181c8a 100755
--- a/examples/workspace/experiments/create_run_m_lflow_runs.py
+++ b/examples/workspace/experiments/create_run_m_lflow_runs.py
@@ -5,10 +5,12 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
-created = w.experiments.create_run(experiment_id=experiment.experiment_id,
-                                   tags=[ml.RunTag(key="foo", value="bar")])
+created = w.experiments.create_run(
+    experiment_id=experiment.experiment_id,
+    tags=[ml.RunTag(key="foo", value="bar")],
+)
 
 # cleanup
 w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
diff --git a/examples/workspace/experiments/get_experiment_experiments.py b/examples/workspace/experiments/get_experiment_experiments.py
index a007e99a9..b640d280b 100755
--- a/examples/workspace/experiments/get_experiment_experiments.py
+++ b/examples/workspace/experiments/get_experiment_experiments.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
 _ = w.experiments.get_experiment(experiment_id=experiment.experiment_id)
 
diff --git a/examples/workspace/experiments/update_experiment_experiments.py b/examples/workspace/experiments/update_experiment_experiments.py
index 9eba74f56..ba975a4e9 100755
--- a/examples/workspace/experiments/update_experiment_experiments.py
+++ b/examples/workspace/experiments/update_experiment_experiments.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
-w.experiments.update_experiment(new_name=f'sdk-{time.time_ns()}', experiment_id=experiment.experiment_id)
+w.experiments.update_experiment(new_name=f"sdk-{time.time_ns()}", experiment_id=experiment.experiment_id)
 
 # cleanup
 w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
diff --git a/examples/workspace/experiments/update_run_m_lflow_runs.py b/examples/workspace/experiments/update_run_m_lflow_runs.py
index aacdd498f..cd94c0d31 100755
--- a/examples/workspace/experiments/update_run_m_lflow_runs.py
+++ b/examples/workspace/experiments/update_run_m_lflow_runs.py
@@ -5,10 +5,12 @@
 
 w = WorkspaceClient()
 
-experiment = w.experiments.create_experiment(name=f'sdk-{time.time_ns()}')
+experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
 
-created = w.experiments.create_run(experiment_id=experiment.experiment_id,
-                                   tags=[ml.RunTag(key="foo", value="bar")])
+created = w.experiments.create_run(
+    experiment_id=experiment.experiment_id,
+    tags=[ml.RunTag(key="foo", value="bar")],
+)
 
 _ = w.experiments.update_run(run_id=created.run.info.run_id, status=ml.UpdateRunStatus.KILLED)
 
diff --git a/examples/workspace/external_locations/create_external_locations.py b/examples/workspace/external_locations/create_external_locations.py
index b9d1036d1..8aabe95ab 100755
--- a/examples/workspace/external_locations/create_external_locations.py
+++ b/examples/workspace/external_locations/create_external_locations.py
@@ -7,12 +7,15 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}')
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+)
 
 # cleanup
 w.storage_credentials.delete(delete=credential.name)
diff --git a/examples/workspace/external_locations/create_external_locations_on_aws.py b/examples/workspace/external_locations/create_external_locations_on_aws.py
index c6010ccfb..4a63ca669 100755
--- a/examples/workspace/external_locations/create_external_locations_on_aws.py
+++ b/examples/workspace/external_locations/create_external_locations_on_aws.py
@@ -7,12 +7,15 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 # cleanup
 w.storage_credentials.delete(name=credential.name)
diff --git a/examples/workspace/external_locations/create_volumes.py b/examples/workspace/external_locations/create_volumes.py
index 1486287d6..40b4357b8 100755
--- a/examples/workspace/external_locations/create_volumes.py
+++ b/examples/workspace/external_locations/create_volumes.py
@@ -7,15 +7,17 @@
 w = WorkspaceClient()
 
 storage_credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-    comment="created via SDK")
+    comment="created via SDK",
+)
 
-external_location = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                                credential_name=storage_credential.name,
-                                                comment="created via SDK",
-                                                url="s3://" + os.environ["TEST_BUCKET"] + "/" +
-                                                f'sdk-{time.time_ns()}')
+external_location = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=storage_credential.name,
+    comment="created via SDK",
+    url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+)
 
 # cleanup
 w.storage_credentials.delete(name=storage_credential.name)
diff --git a/examples/workspace/external_locations/get_external_locations.py b/examples/workspace/external_locations/get_external_locations.py
index 86c7d4a82..157794fdb 100755
--- a/examples/workspace/external_locations/get_external_locations.py
+++ b/examples/workspace/external_locations/get_external_locations.py
@@ -7,12 +7,15 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}')
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+)
 
 _ = w.external_locations.get(get=created.name)
 
diff --git a/examples/workspace/external_locations/get_external_locations_on_aws.py b/examples/workspace/external_locations/get_external_locations_on_aws.py
index 773dbfd5d..63fc3b95c 100755
--- a/examples/workspace/external_locations/get_external_locations_on_aws.py
+++ b/examples/workspace/external_locations/get_external_locations_on_aws.py
@@ -7,12 +7,15 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 _ = w.external_locations.get(name=created.name)
 
diff --git a/examples/workspace/external_locations/update_external_locations.py b/examples/workspace/external_locations/update_external_locations.py
index 1d1d822ae..5820b336e 100755
--- a/examples/workspace/external_locations/update_external_locations.py
+++ b/examples/workspace/external_locations/update_external_locations.py
@@ -7,16 +7,21 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}')
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+)
 
-_ = w.external_locations.update(name=created.name,
-                                credential_name=credential.name,
-                                url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}')
+_ = w.external_locations.update(
+    name=created.name,
+    credential_name=credential.name,
+    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+)
 
 # cleanup
 w.storage_credentials.delete(delete=credential.name)
diff --git a/examples/workspace/external_locations/update_external_locations_on_aws.py b/examples/workspace/external_locations/update_external_locations_on_aws.py
index 11bd19959..f92c86568 100755
--- a/examples/workspace/external_locations/update_external_locations_on_aws.py
+++ b/examples/workspace/external_locations/update_external_locations_on_aws.py
@@ -7,16 +7,21 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
-created = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                      credential_name=credential.name,
-                                      url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=credential.name,
+    url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
-_ = w.external_locations.update(name=created.name,
-                                credential_name=credential.name,
-                                url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+_ = w.external_locations.update(
+    name=created.name,
+    credential_name=credential.name,
+    url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 # cleanup
 w.storage_credentials.delete(name=credential.name)
diff --git a/examples/workspace/git_credentials/update_git_credentials.py b/examples/workspace/git_credentials/update_git_credentials.py
index c877fefcb..9b8d4bb0b 100755
--- a/examples/workspace/git_credentials/update_git_credentials.py
+++ b/examples/workspace/git_credentials/update_git_credentials.py
@@ -6,10 +6,12 @@
 
 cr = w.git_credentials.create(git_provider="gitHub", git_username="test", personal_access_token="test")
 
-w.git_credentials.update(credential_id=cr.credential_id,
-                         git_provider="gitHub",
-                         git_username=f'sdk-{time.time_ns()}@example.com',
-                         personal_access_token=f'sdk-{time.time_ns()}')
+w.git_credentials.update(
+    credential_id=cr.credential_id,
+    git_provider="gitHub",
+    git_username=f"sdk-{time.time_ns()}@example.com",
+    personal_access_token=f"sdk-{time.time_ns()}",
+)
 
 # cleanup
 w.git_credentials.delete(credential_id=cr.credential_id)
diff --git a/examples/workspace/global_init_scripts/create_global_init_scripts.py b/examples/workspace/global_init_scripts/create_global_init_scripts.py
index 6058bbf59..08516969f 100755
--- a/examples/workspace/global_init_scripts/create_global_init_scripts.py
+++ b/examples/workspace/global_init_scripts/create_global_init_scripts.py
@@ -5,10 +5,12 @@
 
 w = WorkspaceClient()
 
-created = w.global_init_scripts.create(name=f'sdk-{time.time_ns()}',
-                                       script=base64.b64encode(("echo 1").encode()).decode(),
-                                       enabled=True,
-                                       position=10)
+created = w.global_init_scripts.create(
+    name=f"sdk-{time.time_ns()}",
+    script=base64.b64encode(("echo 1").encode()).decode(),
+    enabled=True,
+    position=10,
+)
 
 # cleanup
 w.global_init_scripts.delete(script_id=created.script_id)
diff --git a/examples/workspace/global_init_scripts/get_global_init_scripts.py b/examples/workspace/global_init_scripts/get_global_init_scripts.py
index fe12d41b8..76f654ae2 100755
--- a/examples/workspace/global_init_scripts/get_global_init_scripts.py
+++ b/examples/workspace/global_init_scripts/get_global_init_scripts.py
@@ -5,10 +5,12 @@
 
 w = WorkspaceClient()
 
-created = w.global_init_scripts.create(name=f'sdk-{time.time_ns()}',
-                                       script=base64.b64encode(("echo 1").encode()).decode(),
-                                       enabled=True,
-                                       position=10)
+created = w.global_init_scripts.create(
+    name=f"sdk-{time.time_ns()}",
+    script=base64.b64encode(("echo 1").encode()).decode(),
+    enabled=True,
+    position=10,
+)
 
 by_id = w.global_init_scripts.get(script_id=created.script_id)
 
diff --git a/examples/workspace/global_init_scripts/update_global_init_scripts.py b/examples/workspace/global_init_scripts/update_global_init_scripts.py
index a53e410a2..3125b1aa9 100755
--- a/examples/workspace/global_init_scripts/update_global_init_scripts.py
+++ b/examples/workspace/global_init_scripts/update_global_init_scripts.py
@@ -5,14 +5,18 @@
 
 w = WorkspaceClient()
 
-created = w.global_init_scripts.create(name=f'sdk-{time.time_ns()}',
-                                       script=base64.b64encode(("echo 1").encode()).decode(),
-                                       enabled=True,
-                                       position=10)
+created = w.global_init_scripts.create(
+    name=f"sdk-{time.time_ns()}",
+    script=base64.b64encode(("echo 1").encode()).decode(),
+    enabled=True,
+    position=10,
+)
 
-w.global_init_scripts.update(script_id=created.script_id,
-                             name=f'sdk-{time.time_ns()}',
-                             script=base64.b64encode(("echo 2").encode()).decode())
+w.global_init_scripts.update(
+    script_id=created.script_id,
+    name=f"sdk-{time.time_ns()}",
+    script=base64.b64encode(("echo 2").encode()).decode(),
+)
 
 # cleanup
 w.global_init_scripts.delete(script_id=created.script_id)
diff --git a/examples/workspace/grants/get_effective_tables.py b/examples/workspace/grants/get_effective_tables.py
index b7958d718..39caa78a3 100755
--- a/examples/workspace/grants/get_effective_tables.py
+++ b/examples/workspace/grants/get_effective_tables.py
@@ -6,22 +6,31 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-_ = w.statement_execution.execute(warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
-                                  catalog=created_catalog.name,
-                                  schema=created_schema.name,
-                                  statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result()
+_ = w.statement_execution.execute(
+    warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
+    catalog=created_catalog.name,
+    schema=created_schema.name,
+    statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name),
+).result()
 
-table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name)
+table_full_name = "%s.%s.%s" % (
+    created_catalog.name,
+    created_schema.name,
+    table_name,
+)
 
 created_table = w.tables.get(full_name=table_full_name)
 
-grants = w.grants.get_effective(securable_type=catalog.SecurableType.TABLE, full_name=created_table.full_name)
+grants = w.grants.get_effective(
+    securable_type=catalog.SecurableType.TABLE,
+    full_name=created_table.full_name,
+)
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/grants/update_tables.py b/examples/workspace/grants/update_tables.py
index 3bba5dc63..0bfc45848 100755
--- a/examples/workspace/grants/update_tables.py
+++ b/examples/workspace/grants/update_tables.py
@@ -6,29 +6,39 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-_ = w.statement_execution.execute(warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
-                                  catalog=created_catalog.name,
-                                  schema=created_schema.name,
-                                  statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result()
+_ = w.statement_execution.execute(
+    warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
+    catalog=created_catalog.name,
+    schema=created_schema.name,
+    statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name),
+).result()
 
-table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name)
+table_full_name = "%s.%s.%s" % (
+    created_catalog.name,
+    created_schema.name,
+    table_name,
+)
 
 account_level_group_name = os.environ["TEST_DATA_ENG_GROUP"]
 
 created_table = w.tables.get(full_name=table_full_name)
 
-x = w.grants.update(full_name=created_table.full_name,
-                    securable_type=catalog.SecurableType.TABLE,
-                    changes=[
-                        catalog.PermissionsChange(add=[catalog.Privilege.MODIFY, catalog.Privilege.SELECT],
-                                                  principal=account_level_group_name)
-                    ])
+x = w.grants.update(
+    full_name=created_table.full_name,
+    securable_type=catalog.SecurableType.TABLE,
+    changes=[
+        catalog.PermissionsChange(
+            add=[catalog.Privilege.MODIFY, catalog.Privilege.SELECT],
+            principal=account_level_group_name,
+        )
+    ],
+)
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/groups/create_generic_permissions.py b/examples/workspace/groups/create_generic_permissions.py
index 1ba53cbb1..ef38f8834 100755
--- a/examples/workspace/groups/create_generic_permissions.py
+++ b/examples/workspace/groups/create_generic_permissions.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/groups/create_groups.py b/examples/workspace/groups/create_groups.py
index 1ba53cbb1..ef38f8834 100755
--- a/examples/workspace/groups/create_groups.py
+++ b/examples/workspace/groups/create_groups.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/groups/create_secrets.py b/examples/workspace/groups/create_secrets.py
index 1ba53cbb1..ef38f8834 100755
--- a/examples/workspace/groups/create_secrets.py
+++ b/examples/workspace/groups/create_secrets.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/groups/delete_generic_permissions.py b/examples/workspace/groups/delete_generic_permissions.py
index 261d5c772..3ca22786c 100755
--- a/examples/workspace/groups/delete_generic_permissions.py
+++ b/examples/workspace/groups/delete_generic_permissions.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/groups/delete_groups.py b/examples/workspace/groups/delete_groups.py
index 0033bdbb6..eb70ce550 100755
--- a/examples/workspace/groups/delete_groups.py
+++ b/examples/workspace/groups/delete_groups.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 w.groups.delete(id=group.id)
 
diff --git a/examples/workspace/groups/delete_secrets.py b/examples/workspace/groups/delete_secrets.py
index 261d5c772..3ca22786c 100755
--- a/examples/workspace/groups/delete_secrets.py
+++ b/examples/workspace/groups/delete_secrets.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/groups/get_groups.py b/examples/workspace/groups/get_groups.py
index e6d176a8a..4247d0f95 100755
--- a/examples/workspace/groups/get_groups.py
+++ b/examples/workspace/groups/get_groups.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 fetch = w.groups.get(id=group.id)
 
diff --git a/examples/workspace/groups/patch_groups.py b/examples/workspace/groups/patch_groups.py
index fa86dc661..57172b4ec 100644
--- a/examples/workspace/groups/patch_groups.py
+++ b/examples/workspace/groups/patch_groups.py
@@ -1,21 +1,30 @@
 import time
+
 from databricks.sdk import WorkspaceClient
 from databricks.sdk.service import iam
 
 w = WorkspaceClient()
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}-group')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}-group")
 user = w.users.create(
-    display_name=f'sdk-{time.time_ns()}-user', user_name=f'sdk-{time.time_ns()}@example.com')
+    display_name=f"sdk-{time.time_ns()}-user",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 w.groups.patch(
     id=group.id,
-    operations=[iam.Patch(
-        op=iam.PatchOp.ADD,
-        value={"members": [{
-            "value": user.id,
-        }]},
-    )],
+    operations=[
+        iam.Patch(
+            op=iam.PatchOp.ADD,
+            value={
+                "members": [
+                    {
+                        "value": user.id,
+                    }
+                ]
+            },
+        )
+    ],
     schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
 )
 
diff --git a/examples/workspace/instance_pools/create_instance_pools.py b/examples/workspace/instance_pools/create_instance_pools.py
index de027a221..80d0610c4 100755
--- a/examples/workspace/instance_pools/create_instance_pools.py
+++ b/examples/workspace/instance_pools/create_instance_pools.py
@@ -6,7 +6,7 @@
 
 smallest = w.clusters.select_node_type(local_disk=True)
 
-created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest)
+created = w.instance_pools.create(instance_pool_name=f"sdk-{time.time_ns()}", node_type_id=smallest)
 
 # cleanup
 w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
diff --git a/examples/workspace/instance_pools/edit_instance_pools.py b/examples/workspace/instance_pools/edit_instance_pools.py
index 69a8186e4..0fe6bd6cf 100755
--- a/examples/workspace/instance_pools/edit_instance_pools.py
+++ b/examples/workspace/instance_pools/edit_instance_pools.py
@@ -6,11 +6,13 @@
 
 smallest = w.clusters.select_node_type(local_disk=True)
 
-created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest)
+created = w.instance_pools.create(instance_pool_name=f"sdk-{time.time_ns()}", node_type_id=smallest)
 
-w.instance_pools.edit(instance_pool_id=created.instance_pool_id,
-                      instance_pool_name=f'sdk-{time.time_ns()}',
-                      node_type_id=smallest)
+w.instance_pools.edit(
+    instance_pool_id=created.instance_pool_id,
+    instance_pool_name=f"sdk-{time.time_ns()}",
+    node_type_id=smallest,
+)
 
 # cleanup
 w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
diff --git a/examples/workspace/instance_pools/get_instance_pools.py b/examples/workspace/instance_pools/get_instance_pools.py
index 4eeb201b9..ffe43d9e3 100755
--- a/examples/workspace/instance_pools/get_instance_pools.py
+++ b/examples/workspace/instance_pools/get_instance_pools.py
@@ -6,7 +6,7 @@
 
 smallest = w.clusters.select_node_type(local_disk=True)
 
-created = w.instance_pools.create(instance_pool_name=f'sdk-{time.time_ns()}', node_type_id=smallest)
+created = w.instance_pools.create(instance_pool_name=f"sdk-{time.time_ns()}", node_type_id=smallest)
 
 by_id = w.instance_pools.get(instance_pool_id=created.instance_pool_id)
 
diff --git a/examples/workspace/instance_profiles/add_aws_instance_profiles.py b/examples/workspace/instance_profiles/add_aws_instance_profiles.py
index b090eb178..494eedc3d 100755
--- a/examples/workspace/instance_profiles/add_aws_instance_profiles.py
+++ b/examples/workspace/instance_profiles/add_aws_instance_profiles.py
@@ -4,6 +4,8 @@
 
 arn = "arn:aws:iam::000000000000:instance-profile/abc"
 
-w.instance_profiles.add(instance_profile_arn=arn,
-                        skip_validation=True,
-                        iam_role_arn="arn:aws:iam::000000000000:role/bcd")
+w.instance_profiles.add(
+    instance_profile_arn=arn,
+    skip_validation=True,
+    iam_role_arn="arn:aws:iam::000000000000:role/bcd",
+)
diff --git a/examples/workspace/instance_profiles/edit_aws_instance_profiles.py b/examples/workspace/instance_profiles/edit_aws_instance_profiles.py
index bc1e798b0..8f4936d00 100755
--- a/examples/workspace/instance_profiles/edit_aws_instance_profiles.py
+++ b/examples/workspace/instance_profiles/edit_aws_instance_profiles.py
@@ -4,4 +4,7 @@
 
 arn = "arn:aws:iam::000000000000:instance-profile/abc"
 
-w.instance_profiles.edit(instance_profile_arn=arn, iam_role_arn="arn:aws:iam::000000000000:role/bcdf")
+w.instance_profiles.edit(
+    instance_profile_arn=arn,
+    iam_role_arn="arn:aws:iam::000000000000:role/bcdf",
+)
diff --git a/examples/workspace/ip_access_lists/create_ip_access_lists.py b/examples/workspace/ip_access_lists/create_ip_access_lists.py
index a52b80f92..6cc1c5a3f 100755
--- a/examples/workspace/ip_access_lists/create_ip_access_lists.py
+++ b/examples/workspace/ip_access_lists/create_ip_access_lists.py
@@ -5,9 +5,11 @@
 
 w = WorkspaceClient()
 
-created = w.ip_access_lists.create(label=f'sdk-{time.time_ns()}',
-                                   ip_addresses=["1.0.0.0/16"],
-                                   list_type=settings.ListType.BLOCK)
+created = w.ip_access_lists.create(
+    label=f"sdk-{time.time_ns()}",
+    ip_addresses=["1.0.0.0/16"],
+    list_type=settings.ListType.BLOCK,
+)
 
 # cleanup
 w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
diff --git a/examples/workspace/ip_access_lists/get_ip_access_lists.py b/examples/workspace/ip_access_lists/get_ip_access_lists.py
index 85d7929e3..d50485367 100755
--- a/examples/workspace/ip_access_lists/get_ip_access_lists.py
+++ b/examples/workspace/ip_access_lists/get_ip_access_lists.py
@@ -5,9 +5,11 @@
 
 w = WorkspaceClient()
 
-created = w.ip_access_lists.create(label=f'sdk-{time.time_ns()}',
-                                   ip_addresses=["1.0.0.0/16"],
-                                   list_type=settings.ListType.BLOCK)
+created = w.ip_access_lists.create(
+    label=f"sdk-{time.time_ns()}",
+    ip_addresses=["1.0.0.0/16"],
+    list_type=settings.ListType.BLOCK,
+)
 
 by_id = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id)
 
diff --git a/examples/workspace/ip_access_lists/replace_ip_access_lists.py b/examples/workspace/ip_access_lists/replace_ip_access_lists.py
index a61f5f81b..e896899fd 100755
--- a/examples/workspace/ip_access_lists/replace_ip_access_lists.py
+++ b/examples/workspace/ip_access_lists/replace_ip_access_lists.py
@@ -5,15 +5,19 @@
 
 w = WorkspaceClient()
 
-created = w.ip_access_lists.create(label=f'sdk-{time.time_ns()}',
-                                   ip_addresses=["1.0.0.0/16"],
-                                   list_type=settings.ListType.BLOCK)
+created = w.ip_access_lists.create(
+    label=f"sdk-{time.time_ns()}",
+    ip_addresses=["1.0.0.0/16"],
+    list_type=settings.ListType.BLOCK,
+)
 
-w.ip_access_lists.replace(ip_access_list_id=created.ip_access_list.list_id,
-                          label=f'sdk-{time.time_ns()}',
-                          ip_addresses=["1.0.0.0/24"],
-                          list_type=settings.ListType.BLOCK,
-                          enabled=False)
+w.ip_access_lists.replace(
+    ip_access_list_id=created.ip_access_list.list_id,
+    label=f"sdk-{time.time_ns()}",
+    ip_addresses=["1.0.0.0/24"],
+    list_type=settings.ListType.BLOCK,
+    enabled=False,
+)
 
 # cleanup
 w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
diff --git a/examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py b/examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py
index 260f6f882..daddcf027 100755
--- a/examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 w.jobs.cancel_all_runs(job_id=created_job.job_id)
 
diff --git a/examples/workspace/jobs/cancel_run_jobs_api_full_integration.py b/examples/workspace/jobs/cancel_run_jobs_api_full_integration.py
index 9cb4f75cd..03df31221 100755
--- a/examples/workspace/jobs/cancel_run_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/cancel_run_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 run_now_response = w.jobs.run_now(job_id=created_job.job_id)
 
diff --git a/examples/workspace/jobs/create_jobs_api_full_integration.py b/examples/workspace/jobs/create_jobs_api_full_integration.py
index 1f5082886..432ad5655 100755
--- a/examples/workspace/jobs/create_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/create_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 # cleanup
 w.jobs.delete(job_id=created_job.job_id)
diff --git a/examples/workspace/jobs/export_run_jobs_api_full_integration.py b/examples/workspace/jobs/export_run_jobs_api_full_integration.py
index 613961199..fccef3fdd 100755
--- a/examples/workspace/jobs/export_run_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/export_run_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 run_by_id = w.jobs.run_now(job_id=created_job.job_id).result()
 
diff --git a/examples/workspace/jobs/get_jobs_api_full_integration.py b/examples/workspace/jobs/get_jobs_api_full_integration.py
index 3a30f1794..6b7230b00 100755
--- a/examples/workspace/jobs/get_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/get_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 by_id = w.jobs.get(job_id=created_job.job_id)
 
diff --git a/examples/workspace/jobs/get_run_output_jobs_api_full_integration.py b/examples/workspace/jobs/get_run_output_jobs_api_full_integration.py
index 804577415..e50b1e5a4 100755
--- a/examples/workspace/jobs/get_run_output_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/get_run_output_jobs_api_full_integration.py
@@ -6,17 +6,22 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-run = w.jobs.submit(run_name=f'sdk-{time.time_ns()}',
-                    tasks=[
-                        jobs.SubmitTask(existing_cluster_id=cluster_id,
-                                        notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                        task_key=f'sdk-{time.time_ns()}')
-                    ]).result()
+run = w.jobs.submit(
+    run_name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.SubmitTask(
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key=f"sdk-{time.time_ns()}",
+        )
+    ],
+).result()
 
 output = w.jobs.get_run_output(run_id=run.tasks[0].run_id)
 
diff --git a/examples/workspace/jobs/list_runs_jobs_api_full_integration.py b/examples/workspace/jobs/list_runs_jobs_api_full_integration.py
index f863309d4..6e5e84938 100755
--- a/examples/workspace/jobs/list_runs_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/list_runs_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 run_list = w.jobs.list_runs(job_id=created_job.job_id)
 
diff --git a/examples/workspace/jobs/repair_run_jobs_api_full_integration.py b/examples/workspace/jobs/repair_run_jobs_api_full_integration.py
index b90cbc1d8..701bc9e90 100755
--- a/examples/workspace/jobs/repair_run_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/repair_run_jobs_api_full_integration.py
@@ -6,26 +6,33 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 run_now_response = w.jobs.run_now(job_id=created_job.job_id)
 
 cancelled_run = w.jobs.cancel_run(run_id=run_now_response.response.run_id).result()
 
-repaired_run = w.jobs.repair_run(rerun_tasks=[cancelled_run.tasks[0].task_key],
-                                 run_id=run_now_response.response.run_id).result()
+repaired_run = w.jobs.repair_run(
+    rerun_tasks=[cancelled_run.tasks[0].task_key],
+    run_id=run_now_response.response.run_id,
+).result()
 
 # cleanup
 w.jobs.delete(job_id=created_job.job_id)
diff --git a/examples/workspace/jobs/reset_jobs_api_full_integration.py b/examples/workspace/jobs/reset_jobs_api_full_integration.py
index 3215fe6fd..15cdd426f 100755
--- a/examples/workspace/jobs/reset_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/reset_jobs_api_full_integration.py
@@ -6,25 +6,33 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
-
-new_name = f'sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
+
+new_name = f"sdk-{time.time_ns()}"
 
 by_id = w.jobs.get(job_id=created_job.job_id)
 
-w.jobs.reset(job_id=by_id.job_id, new_settings=jobs.JobSettings(name=new_name, tasks=by_id.settings.tasks))
+w.jobs.reset(
+    job_id=by_id.job_id,
+    new_settings=jobs.JobSettings(name=new_name, tasks=by_id.settings.tasks),
+)
 
 # cleanup
 w.jobs.delete(job_id=created_job.job_id)
diff --git a/examples/workspace/jobs/run_now_jobs_api_full_integration.py b/examples/workspace/jobs/run_now_jobs_api_full_integration.py
index 3896990b6..d3f6f5fb2 100755
--- a/examples/workspace/jobs/run_now_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/run_now_jobs_api_full_integration.py
@@ -6,19 +6,24 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
 
 run_by_id = w.jobs.run_now(job_id=created_job.job_id).result()
 
diff --git a/examples/workspace/jobs/submit_jobs_api_full_integration.py b/examples/workspace/jobs/submit_jobs_api_full_integration.py
index 5769d7675..5e73904d8 100755
--- a/examples/workspace/jobs/submit_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/submit_jobs_api_full_integration.py
@@ -6,17 +6,22 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
 
-run = w.jobs.submit(run_name=f'sdk-{time.time_ns()}',
-                    tasks=[
-                        jobs.SubmitTask(existing_cluster_id=cluster_id,
-                                        notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                        task_key=f'sdk-{time.time_ns()}')
-                    ]).result()
+run = w.jobs.submit(
+    run_name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.SubmitTask(
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key=f"sdk-{time.time_ns()}",
+        )
+    ],
+).result()
 
 # cleanup
 w.jobs.delete_run(run_id=run.run_id)
diff --git a/examples/workspace/jobs/update_jobs_api_full_integration.py b/examples/workspace/jobs/update_jobs_api_full_integration.py
index 61194b8e8..00e9ea29d 100755
--- a/examples/workspace/jobs/update_jobs_api_full_integration.py
+++ b/examples/workspace/jobs/update_jobs_api_full_integration.py
@@ -6,23 +6,31 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-new_name = f'sdk-{time.time_ns()}'
-
-created_job = w.jobs.create(name=f'sdk-{time.time_ns()}',
-                            tasks=[
-                                jobs.Task(description="test",
-                                          existing_cluster_id=cluster_id,
-                                          notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                                          task_key="test",
-                                          timeout_seconds=0)
-                            ])
-
-w.jobs.update(job_id=created_job.job_id, new_settings=jobs.JobSettings(name=new_name, max_concurrent_runs=5))
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+new_name = f"sdk-{time.time_ns()}"
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
+
+w.jobs.update(
+    job_id=created_job.job_id,
+    new_settings=jobs.JobSettings(name=new_name, max_concurrent_runs=5),
+)
 
 # cleanup
 w.jobs.delete(job_id=created_job.job_id)
diff --git a/examples/workspace/libraries/update_libraries.py b/examples/workspace/libraries/update_libraries.py
index d741ee054..15a68dccc 100755
--- a/examples/workspace/libraries/update_libraries.py
+++ b/examples/workspace/libraries/update_libraries.py
@@ -5,8 +5,11 @@
 
 w = WorkspaceClient()
 
-cluster_id = w.clusters.ensure_cluster_is_running(
-    os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-
-w.libraries.update(cluster_id=cluster_id,
-                   install=[compute.Library(pypi=compute.PythonPyPiLibrary(package="dbl-tempo"))]).result()
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+w.libraries.update(
+    cluster_id=cluster_id,
+    install=[compute.Library(pypi=compute.PythonPyPiLibrary(package="dbl-tempo"))],
+).result()
diff --git a/examples/workspace/metastores/assign_metastores.py b/examples/workspace/metastores/assign_metastores.py
index 6a874bfdd..c344657ea 100755
--- a/examples/workspace/metastores/assign_metastores.py
+++ b/examples/workspace/metastores/assign_metastores.py
@@ -7,9 +7,10 @@
 
 workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 w.metastores.assign(metastore_id=created.metastore_id, workspace_id=workspace_id)
 
diff --git a/examples/workspace/metastores/create_metastores.py b/examples/workspace/metastores/create_metastores.py
index bce183b4b..3d5ae268b 100755
--- a/examples/workspace/metastores/create_metastores.py
+++ b/examples/workspace/metastores/create_metastores.py
@@ -5,9 +5,10 @@
 
 w = WorkspaceClient()
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 # cleanup
 w.metastores.delete(id=created.metastore_id, force=True)
diff --git a/examples/workspace/metastores/enable_optimization_metastores.py b/examples/workspace/metastores/enable_optimization_metastores.py
index 8d3d4cd08..f9c10daf5 100755
--- a/examples/workspace/metastores/enable_optimization_metastores.py
+++ b/examples/workspace/metastores/enable_optimization_metastores.py
@@ -5,9 +5,10 @@
 
 w = WorkspaceClient()
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 auto_maintenance = w.metastores.enable_optimization(enable=True, metastore_id=created.metastore_id)
 
diff --git a/examples/workspace/metastores/get_metastores.py b/examples/workspace/metastores/get_metastores.py
index 38ce05c0a..1c0142f7b 100755
--- a/examples/workspace/metastores/get_metastores.py
+++ b/examples/workspace/metastores/get_metastores.py
@@ -5,9 +5,10 @@
 
 w = WorkspaceClient()
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 _ = w.metastores.get(id=created.metastore_id)
 
diff --git a/examples/workspace/metastores/maintenance_metastores.py b/examples/workspace/metastores/maintenance_metastores.py
index acad599c7..3a0f4bb6e 100755
--- a/examples/workspace/metastores/maintenance_metastores.py
+++ b/examples/workspace/metastores/maintenance_metastores.py
@@ -5,9 +5,10 @@
 
 w = WorkspaceClient()
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 auto_maintenance = w.metastores.maintenance(enable=True, metastore_id=created.metastore_id)
 
diff --git a/examples/workspace/metastores/unassign_metastores.py b/examples/workspace/metastores/unassign_metastores.py
index ec023cbdc..19f306933 100755
--- a/examples/workspace/metastores/unassign_metastores.py
+++ b/examples/workspace/metastores/unassign_metastores.py
@@ -7,9 +7,10 @@
 
 workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
 w.metastores.unassign(metastore_id=created.metastore_id, workspace_id=workspace_id)
 
diff --git a/examples/workspace/metastores/update_metastores.py b/examples/workspace/metastores/update_metastores.py
index f7152b552..db67dafc6 100755
--- a/examples/workspace/metastores/update_metastores.py
+++ b/examples/workspace/metastores/update_metastores.py
@@ -5,11 +5,12 @@
 
 w = WorkspaceClient()
 
-created = w.metastores.create(name=f'sdk-{time.time_ns()}',
-                              storage_root="s3://%s/%s" %
-                              (os.environ["TEST_BUCKET"], f'sdk-{time.time_ns()}'))
+created = w.metastores.create(
+    name=f"sdk-{time.time_ns()}",
+    storage_root="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
+)
 
-_ = w.metastores.update(id=created.metastore_id, new_name=f'sdk-{time.time_ns()}')
+_ = w.metastores.update(id=created.metastore_id, new_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.metastores.delete(id=created.metastore_id, force=True)
diff --git a/examples/workspace/model_registry/create_comment_model_version_comments.py b/examples/workspace/model_registry/create_comment_model_version_comments.py
index 8bb5decd4..01ac6ff07 100755
--- a/examples/workspace/model_registry/create_comment_model_version_comments.py
+++ b/examples/workspace/model_registry/create_comment_model_version_comments.py
@@ -4,13 +4,15 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
-created = w.model_registry.create_comment(comment=f'sdk-{time.time_ns()}',
-                                          name=mv.model_version.name,
-                                          version=mv.model_version.version)
+created = w.model_registry.create_comment(
+    comment=f"sdk-{time.time_ns()}",
+    name=mv.model_version.name,
+    version=mv.model_version.version,
+)
 
 # cleanup
 w.model_registry.delete_comment(id=created.comment.id)
diff --git a/examples/workspace/model_registry/create_model_model_version_comments.py b/examples/workspace/model_registry/create_model_model_version_comments.py
index f09c8691f..6b32b1aa8 100755
--- a/examples/workspace/model_registry/create_model_model_version_comments.py
+++ b/examples/workspace/model_registry/create_model_model_version_comments.py
@@ -4,4 +4,4 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
diff --git a/examples/workspace/model_registry/create_model_model_versions.py b/examples/workspace/model_registry/create_model_model_versions.py
index f09c8691f..6b32b1aa8 100755
--- a/examples/workspace/model_registry/create_model_model_versions.py
+++ b/examples/workspace/model_registry/create_model_model_versions.py
@@ -4,4 +4,4 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
diff --git a/examples/workspace/model_registry/create_model_models.py b/examples/workspace/model_registry/create_model_models.py
index eb1316e0f..4369d597d 100755
--- a/examples/workspace/model_registry/create_model_models.py
+++ b/examples/workspace/model_registry/create_model_models.py
@@ -4,4 +4,4 @@
 
 w = WorkspaceClient()
 
-created = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
diff --git a/examples/workspace/model_registry/create_model_version_model_version_comments.py b/examples/workspace/model_registry/create_model_version_model_version_comments.py
index e7cf59c58..d95b0c6b4 100755
--- a/examples/workspace/model_registry/create_model_version_model_version_comments.py
+++ b/examples/workspace/model_registry/create_model_version_model_version_comments.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
diff --git a/examples/workspace/model_registry/create_model_version_model_versions.py b/examples/workspace/model_registry/create_model_version_model_versions.py
index 83ae3ee5d..f6bf77147 100755
--- a/examples/workspace/model_registry/create_model_version_model_versions.py
+++ b/examples/workspace/model_registry/create_model_version_model_versions.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
diff --git a/examples/workspace/model_registry/create_webhook_registry_webhooks.py b/examples/workspace/model_registry/create_webhook_registry_webhooks.py
index c38600b55..3a29e5443 100755
--- a/examples/workspace/model_registry/create_webhook_registry_webhooks.py
+++ b/examples/workspace/model_registry/create_webhook_registry_webhooks.py
@@ -5,9 +5,11 @@
 
 w = WorkspaceClient()
 
-created = w.model_registry.create_webhook(description=f'sdk-{time.time_ns()}',
-                                          events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
-                                          http_url_spec=ml.HttpUrlSpec(url=w.config.host))
+created = w.model_registry.create_webhook(
+    description=f"sdk-{time.time_ns()}",
+    events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
+    http_url_spec=ml.HttpUrlSpec(url=w.config.host),
+)
 
 # cleanup
 w.model_registry.delete_webhook(id=created.webhook.id)
diff --git a/examples/workspace/model_registry/get_model_models.py b/examples/workspace/model_registry/get_model_models.py
index 0393ed51b..9a9486d02 100755
--- a/examples/workspace/model_registry/get_model_models.py
+++ b/examples/workspace/model_registry/get_model_models.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-created = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 model = w.model_registry.get_model(name=created.registered_model.name)
diff --git a/examples/workspace/model_registry/update_comment_model_version_comments.py b/examples/workspace/model_registry/update_comment_model_version_comments.py
index 6e10540e4..0f291564e 100755
--- a/examples/workspace/model_registry/update_comment_model_version_comments.py
+++ b/examples/workspace/model_registry/update_comment_model_version_comments.py
@@ -4,15 +4,17 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
-created = w.model_registry.create_comment(comment=f'sdk-{time.time_ns()}',
-                                          name=mv.model_version.name,
-                                          version=mv.model_version.version)
+created = w.model_registry.create_comment(
+    comment=f"sdk-{time.time_ns()}",
+    name=mv.model_version.name,
+    version=mv.model_version.version,
+)
 
-_ = w.model_registry.update_comment(comment=f'sdk-{time.time_ns()}', id=created.comment.id)
+_ = w.model_registry.update_comment(comment=f"sdk-{time.time_ns()}", id=created.comment.id)
 
 # cleanup
 w.model_registry.delete_comment(id=created.comment.id)
diff --git a/examples/workspace/model_registry/update_model_models.py b/examples/workspace/model_registry/update_model_models.py
index 5a0d8618c..0777b292a 100755
--- a/examples/workspace/model_registry/update_model_models.py
+++ b/examples/workspace/model_registry/update_model_models.py
@@ -4,9 +4,11 @@
 
 w = WorkspaceClient()
 
-created = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 model = w.model_registry.get_model(name=created.registered_model.name)
 
-w.model_registry.update_model(name=model.registered_model_databricks.name,
-                              description=f'sdk-{time.time_ns()}')
+w.model_registry.update_model(
+    name=model.registered_model_databricks.name,
+    description=f"sdk-{time.time_ns()}",
+)
diff --git a/examples/workspace/model_registry/update_model_version_model_versions.py b/examples/workspace/model_registry/update_model_version_model_versions.py
index 18050c180..312cfcf9b 100755
--- a/examples/workspace/model_registry/update_model_version_model_versions.py
+++ b/examples/workspace/model_registry/update_model_version_model_versions.py
@@ -4,10 +4,12 @@
 
 w = WorkspaceClient()
 
-model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
+model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
 
 created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
-w.model_registry.update_model_version(description=f'sdk-{time.time_ns()}',
-                                      name=created.model_version.name,
-                                      version=created.model_version.version)
+w.model_registry.update_model_version(
+    description=f"sdk-{time.time_ns()}",
+    name=created.model_version.name,
+    version=created.model_version.version,
+)
diff --git a/examples/workspace/model_registry/update_webhook_registry_webhooks.py b/examples/workspace/model_registry/update_webhook_registry_webhooks.py
index 638582ef7..99d8f6f34 100755
--- a/examples/workspace/model_registry/update_webhook_registry_webhooks.py
+++ b/examples/workspace/model_registry/update_webhook_registry_webhooks.py
@@ -5,11 +5,13 @@
 
 w = WorkspaceClient()
 
-created = w.model_registry.create_webhook(description=f'sdk-{time.time_ns()}',
-                                          events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
-                                          http_url_spec=ml.HttpUrlSpec(url=w.config.host))
+created = w.model_registry.create_webhook(
+    description=f"sdk-{time.time_ns()}",
+    events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
+    http_url_spec=ml.HttpUrlSpec(url=w.config.host),
+)
 
-w.model_registry.update_webhook(id=created.webhook.id, description=f'sdk-{time.time_ns()}')
+w.model_registry.update_webhook(id=created.webhook.id, description=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.model_registry.delete_webhook(id=created.webhook.id)
diff --git a/examples/workspace/permissions/get_generic_permissions.py b/examples/workspace/permissions/get_generic_permissions.py
index c103bbdde..d1006c6e5 100755
--- a/examples/workspace/permissions/get_generic_permissions.py
+++ b/examples/workspace/permissions/get_generic_permissions.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 obj = w.workspace.get_status(path=notebook_path)
 
diff --git a/examples/workspace/permissions/get_permission_levels_generic_permissions.py b/examples/workspace/permissions/get_permission_levels_generic_permissions.py
index 4dcd5bd25..ce54b0020 100755
--- a/examples/workspace/permissions/get_permission_levels_generic_permissions.py
+++ b/examples/workspace/permissions/get_permission_levels_generic_permissions.py
@@ -4,9 +4,8 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 obj = w.workspace.get_status(path=notebook_path)
 
-levels = w.permissions.get_permission_levels(request_object_type="notebooks",
-                                             request_object_id="%d" % (obj.object_id))
+levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
diff --git a/examples/workspace/permissions/set_generic_permissions.py b/examples/workspace/permissions/set_generic_permissions.py
index 6e42b8183..fec6c53c8 100755
--- a/examples/workspace/permissions/set_generic_permissions.py
+++ b/examples/workspace/permissions/set_generic_permissions.py
@@ -5,18 +5,22 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
 obj = w.workspace.get_status(path=notebook_path)
 
-_ = w.permissions.set(request_object_type="notebooks",
-                      request_object_id="%d" % (obj.object_id),
-                      access_control_list=[
-                          iam.AccessControlRequest(group_name=group.display_name,
-                                                   permission_level=iam.PermissionLevel.CAN_RUN)
-                      ])
+_ = w.permissions.set(
+    request_object_type="notebooks",
+    request_object_id="%d" % (obj.object_id),
+    access_control_list=[
+        iam.AccessControlRequest(
+            group_name=group.display_name,
+            permission_level=iam.PermissionLevel.CAN_RUN,
+        )
+    ],
+)
 
 # cleanup
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/pipelines/create_pipelines.py b/examples/workspace/pipelines/create_pipelines.py
index 5a2b3933e..a0f6bb428 100755
--- a/examples/workspace/pipelines/create_pipelines.py
+++ b/examples/workspace/pipelines/create_pipelines.py
@@ -6,20 +6,23 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 created = w.pipelines.create(
     continuous=False,
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path=notebook_path))],
     clusters=[
-        pipelines.PipelineCluster(instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                                  label="default",
-                                  num_workers=1,
-                                  custom_tags={
-                                      "cluster_type": "default",
-                                  })
-    ])
+        pipelines.PipelineCluster(
+            instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+            label="default",
+            num_workers=1,
+            custom_tags={
+                "cluster_type": "default",
+            },
+        )
+    ],
+)
 
 # cleanup
 w.pipelines.delete(pipeline_id=created.pipeline_id)
diff --git a/examples/workspace/pipelines/get_pipelines.py b/examples/workspace/pipelines/get_pipelines.py
index 6222b4c86..f20999148 100755
--- a/examples/workspace/pipelines/get_pipelines.py
+++ b/examples/workspace/pipelines/get_pipelines.py
@@ -6,20 +6,23 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 created = w.pipelines.create(
     continuous=False,
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path=notebook_path))],
     clusters=[
-        pipelines.PipelineCluster(instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                                  label="default",
-                                  num_workers=1,
-                                  custom_tags={
-                                      "cluster_type": "default",
-                                  })
-    ])
+        pipelines.PipelineCluster(
+            instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+            label="default",
+            num_workers=1,
+            custom_tags={
+                "cluster_type": "default",
+            },
+        )
+    ],
+)
 
 by_id = w.pipelines.get(pipeline_id=created.pipeline_id)
 
diff --git a/examples/workspace/pipelines/list_pipeline_events_pipelines.py b/examples/workspace/pipelines/list_pipeline_events_pipelines.py
index c4150187c..68d2197d3 100755
--- a/examples/workspace/pipelines/list_pipeline_events_pipelines.py
+++ b/examples/workspace/pipelines/list_pipeline_events_pipelines.py
@@ -6,20 +6,23 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 created = w.pipelines.create(
     continuous=False,
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path=notebook_path))],
     clusters=[
-        pipelines.PipelineCluster(instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                                  label="default",
-                                  num_workers=1,
-                                  custom_tags={
-                                      "cluster_type": "default",
-                                  })
-    ])
+        pipelines.PipelineCluster(
+            instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+            label="default",
+            num_workers=1,
+            custom_tags={
+                "cluster_type": "default",
+            },
+        )
+    ],
+)
 
 events = w.pipelines.list_pipeline_events(pipeline_id=created.pipeline_id)
 
diff --git a/examples/workspace/pipelines/update_pipelines.py b/examples/workspace/pipelines/update_pipelines.py
index 656d90071..8e0b92a54 100755
--- a/examples/workspace/pipelines/update_pipelines.py
+++ b/examples/workspace/pipelines/update_pipelines.py
@@ -6,33 +6,39 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 created = w.pipelines.create(
     continuous=False,
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path=notebook_path))],
     clusters=[
-        pipelines.PipelineCluster(instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                                  label="default",
-                                  num_workers=1,
-                                  custom_tags={
-                                      "cluster_type": "default",
-                                  })
-    ])
+        pipelines.PipelineCluster(
+            instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+            label="default",
+            num_workers=1,
+            custom_tags={
+                "cluster_type": "default",
+            },
+        )
+    ],
+)
 
 w.pipelines.update(
     pipeline_id=created.pipeline_id,
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path=notebook_path))],
     clusters=[
-        pipelines.PipelineCluster(instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
-                                  label="default",
-                                  num_workers=1,
-                                  custom_tags={
-                                      "cluster_type": "default",
-                                  })
-    ])
+        pipelines.PipelineCluster(
+            instance_pool_id=os.environ["TEST_INSTANCE_POOL_ID"],
+            label="default",
+            num_workers=1,
+            custom_tags={
+                "cluster_type": "default",
+            },
+        )
+    ],
+)
 
 # cleanup
 w.pipelines.delete(pipeline_id=created.pipeline_id)
diff --git a/examples/workspace/providers/create_providers.py b/examples/workspace/providers/create_providers.py
index cee06e5d2..07e16fe94 100755
--- a/examples/workspace/providers/create_providers.py
+++ b/examples/workspace/providers/create_providers.py
@@ -11,7 +11,7 @@
     }
 """
 
-created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient)
+created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
 
 # cleanup
 w.providers.delete(name=created.name)
diff --git a/examples/workspace/providers/get_providers.py b/examples/workspace/providers/get_providers.py
index ba86bc5ea..7c9619f58 100755
--- a/examples/workspace/providers/get_providers.py
+++ b/examples/workspace/providers/get_providers.py
@@ -11,7 +11,7 @@
     }
 """
 
-created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient)
+created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
 
 _ = w.providers.get(name=created.name)
 
diff --git a/examples/workspace/providers/list_shares_providers.py b/examples/workspace/providers/list_shares_providers.py
index b2e885938..0ca84791a 100755
--- a/examples/workspace/providers/list_shares_providers.py
+++ b/examples/workspace/providers/list_shares_providers.py
@@ -11,7 +11,7 @@
     }
 """
 
-created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient)
+created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
 
 shares = w.providers.list_shares(name=created.name)
 
diff --git a/examples/workspace/providers/update_providers.py b/examples/workspace/providers/update_providers.py
index 66b68c35c..b97460f21 100755
--- a/examples/workspace/providers/update_providers.py
+++ b/examples/workspace/providers/update_providers.py
@@ -11,7 +11,7 @@
     }
 """
 
-created = w.providers.create(name=f'sdk-{time.time_ns()}', recipient_profile_str=public_share_recipient)
+created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient)
 
 _ = w.providers.update(name=created.name, comment="Comment for update")
 
diff --git a/examples/workspace/queries/create_alerts.py b/examples/workspace/queries/create_alerts.py
index f0213aea9..a539beda1 100755
--- a/examples/workspace/queries/create_alerts.py
+++ b/examples/workspace/queries/create_alerts.py
@@ -7,10 +7,14 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SELECT 1"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SELECT 1",
+    )
+)
 
 # cleanup
 w.queries.delete(id=query.id)
diff --git a/examples/workspace/queries/create_queries.py b/examples/workspace/queries/create_queries.py
index ce293d410..b157d5cf1 100755
--- a/examples/workspace/queries/create_queries.py
+++ b/examples/workspace/queries/create_queries.py
@@ -7,10 +7,14 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SHOW TABLES"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SHOW TABLES",
+    )
+)
 
 # cleanup
 w.queries.delete(id=query.id)
diff --git a/examples/workspace/queries/get_queries.py b/examples/workspace/queries/get_queries.py
index f1854d306..0f38eabe9 100755
--- a/examples/workspace/queries/get_queries.py
+++ b/examples/workspace/queries/get_queries.py
@@ -7,10 +7,14 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SHOW TABLES"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SHOW TABLES",
+    )
+)
 
 by_id = w.queries.get(id=query.id)
 
diff --git a/examples/workspace/queries/update_queries.py b/examples/workspace/queries/update_queries.py
index 948d9a916..530240d43 100755
--- a/examples/workspace/queries/update_queries.py
+++ b/examples/workspace/queries/update_queries.py
@@ -7,16 +7,24 @@
 
 srcs = w.data_sources.list()
 
-query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                           warehouse_id=srcs[0].warehouse_id,
-                                                           description="test query from Go SDK",
-                                                           query_text="SHOW TABLES"))
+query = w.queries.create(
+    query=sql.CreateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        warehouse_id=srcs[0].warehouse_id,
+        description="test query from Go SDK",
+        query_text="SHOW TABLES",
+    )
+)
 
-updated = w.queries.update(id=query.id,
-                           query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
-                                                             description="UPDATED: test query from Go SDK",
-                                                             query_text="SELECT 2+2"),
-                           update_mask="display_name,description,query_text")
+updated = w.queries.update(
+    id=query.id,
+    query=sql.UpdateQueryRequestQuery(
+        display_name=f"sdk-{time.time_ns()}",
+        description="UPDATED: test query from Go SDK",
+        query_text="SELECT 2+2",
+    ),
+    update_mask="display_name,description,query_text",
+)
 
 # cleanup
 w.queries.delete(id=query.id)
diff --git a/examples/workspace/query_history/list_sql_query_history.py b/examples/workspace/query_history/list_sql_query_history.py
index 672b46ed5..14e4a870f 100755
--- a/examples/workspace/query_history/list_sql_query_history.py
+++ b/examples/workspace/query_history/list_sql_query_history.py
@@ -3,5 +3,8 @@
 
 w = WorkspaceClient()
 
-_ = w.query_history.list(filter_by=sql.QueryFilter(
-    query_start_time_range=sql.TimeRange(start_time_ms=1690243200000, end_time_ms=1690329600000)))
+_ = w.query_history.list(
+    filter_by=sql.QueryFilter(
+        query_start_time_range=sql.TimeRange(start_time_ms=1690243200000, end_time_ms=1690329600000)
+    )
+)
diff --git a/examples/workspace/recipients/create_recipients.py b/examples/workspace/recipients/create_recipients.py
index 4c01e2f5e..618da473a 100755
--- a/examples/workspace/recipients/create_recipients.py
+++ b/examples/workspace/recipients/create_recipients.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.recipients.create(name=f'sdk-{time.time_ns()}')
+created = w.recipients.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.recipients.delete(name=created.name)
diff --git a/examples/workspace/recipients/get_recipients.py b/examples/workspace/recipients/get_recipients.py
index 4e8998e41..a5bc7d6d8 100755
--- a/examples/workspace/recipients/get_recipients.py
+++ b/examples/workspace/recipients/get_recipients.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.recipients.create(name=f'sdk-{time.time_ns()}')
+created = w.recipients.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.recipients.get(name=created.name)
 
diff --git a/examples/workspace/recipients/rotate_token_recipients.py b/examples/workspace/recipients/rotate_token_recipients.py
index 5abc2af9e..f1fe79bfd 100755
--- a/examples/workspace/recipients/rotate_token_recipients.py
+++ b/examples/workspace/recipients/rotate_token_recipients.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.recipients.create(name=f'sdk-{time.time_ns()}')
+created = w.recipients.create(name=f"sdk-{time.time_ns()}")
 
 recipient_info = w.recipients.rotate_token(name=created.name, existing_token_expire_in_seconds=0)
 
diff --git a/examples/workspace/recipients/share_permissions_recipients.py b/examples/workspace/recipients/share_permissions_recipients.py
index 1f04c0321..633a77698 100755
--- a/examples/workspace/recipients/share_permissions_recipients.py
+++ b/examples/workspace/recipients/share_permissions_recipients.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.recipients.create(name=f'sdk-{time.time_ns()}')
+created = w.recipients.create(name=f"sdk-{time.time_ns()}")
 
 share_permissions = w.recipients.share_permissions(name=created.name)
 
diff --git a/examples/workspace/recipients/update_recipients.py b/examples/workspace/recipients/update_recipients.py
index 959266dde..ec0df221a 100755
--- a/examples/workspace/recipients/update_recipients.py
+++ b/examples/workspace/recipients/update_recipients.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created = w.recipients.create(name=f'sdk-{time.time_ns()}')
+created = w.recipients.create(name=f"sdk-{time.time_ns()}")
 
-w.recipients.update(name=created.name, comment=f'sdk-{time.time_ns()}')
+w.recipients.update(name=created.name, comment=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.recipients.delete(name=created.name)
diff --git a/examples/workspace/repos/create_repos.py b/examples/workspace/repos/create_repos.py
index b3a100949..07373c6b4 100755
--- a/examples/workspace/repos/create_repos.py
+++ b/examples/workspace/repos/create_repos.py
@@ -4,9 +4,13 @@
 
 w = WorkspaceClient()
 
-root = f'sdk-{time.time_ns()}'
+root = f"sdk-{time.time_ns()}"
 
-ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github")
+ri = w.repos.create(
+    path=root,
+    url="https://github.com/shreyas-goenka/empty-repo.git",
+    provider="github",
+)
 
 # cleanup
 w.repos.delete(repo_id=ri.id)
diff --git a/examples/workspace/repos/get_repos.py b/examples/workspace/repos/get_repos.py
index fcb6c69c6..9585b7585 100755
--- a/examples/workspace/repos/get_repos.py
+++ b/examples/workspace/repos/get_repos.py
@@ -4,9 +4,13 @@
 
 w = WorkspaceClient()
 
-root = f'sdk-{time.time_ns()}'
+root = f"sdk-{time.time_ns()}"
 
-ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github")
+ri = w.repos.create(
+    path=root,
+    url="https://github.com/shreyas-goenka/empty-repo.git",
+    provider="github",
+)
 
 by_id = w.repos.get(repo_id=ri.id)
 
diff --git a/examples/workspace/repos/update_repos.py b/examples/workspace/repos/update_repos.py
index afb94375a..ca68ef140 100755
--- a/examples/workspace/repos/update_repos.py
+++ b/examples/workspace/repos/update_repos.py
@@ -4,9 +4,13 @@
 
 w = WorkspaceClient()
 
-root = f'sdk-{time.time_ns()}'
+root = f"sdk-{time.time_ns()}"
 
-ri = w.repos.create(path=root, url="https://github.com/shreyas-goenka/empty-repo.git", provider="github")
+ri = w.repos.create(
+    path=root,
+    url="https://github.com/shreyas-goenka/empty-repo.git",
+    provider="github",
+)
 
 w.repos.update(repo_id=ri.id, branch="foo")
 
diff --git a/examples/workspace/schemas/create_schemas.py b/examples/workspace/schemas/create_schemas.py
index 7bc3bb19f..5d9a57f44 100755
--- a/examples/workspace/schemas/create_schemas.py
+++ b/examples/workspace/schemas/create_schemas.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-new_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=new_catalog.name)
+created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
 
 # cleanup
 w.catalogs.delete(name=new_catalog.name, force=True)
diff --git a/examples/workspace/schemas/create_shares.py b/examples/workspace/schemas/create_shares.py
index 88dfa1bcc..0c9f450c9 100755
--- a/examples/workspace/schemas/create_shares.py
+++ b/examples/workspace/schemas/create_shares.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/schemas/create_tables.py b/examples/workspace/schemas/create_tables.py
index 88dfa1bcc..0c9f450c9 100755
--- a/examples/workspace/schemas/create_tables.py
+++ b/examples/workspace/schemas/create_tables.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/schemas/create_volumes.py b/examples/workspace/schemas/create_volumes.py
index 88dfa1bcc..0c9f450c9 100755
--- a/examples/workspace/schemas/create_volumes.py
+++ b/examples/workspace/schemas/create_volumes.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 # cleanup
 w.catalogs.delete(name=created_catalog.name, force=True)
diff --git a/examples/workspace/schemas/get_schemas.py b/examples/workspace/schemas/get_schemas.py
index 2b9c0a582..d549923ac 100755
--- a/examples/workspace/schemas/get_schemas.py
+++ b/examples/workspace/schemas/get_schemas.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-new_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=new_catalog.name)
+created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
 
 _ = w.schemas.get(full_name=created.full_name)
 
diff --git a/examples/workspace/schemas/list_schemas.py b/examples/workspace/schemas/list_schemas.py
index 7939a4411..16e7f5788 100755
--- a/examples/workspace/schemas/list_schemas.py
+++ b/examples/workspace/schemas/list_schemas.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-new_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 all = w.schemas.list(catalog_name=new_catalog.name)
 
diff --git a/examples/workspace/schemas/update_schemas.py b/examples/workspace/schemas/update_schemas.py
index b95a7b1ce..f642c7f1f 100755
--- a/examples/workspace/schemas/update_schemas.py
+++ b/examples/workspace/schemas/update_schemas.py
@@ -4,11 +4,11 @@
 
 w = WorkspaceClient()
 
-new_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=new_catalog.name)
+created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
 
-_ = w.schemas.update(full_name=created.full_name, comment=f'sdk-{time.time_ns()}')
+_ = w.schemas.update(full_name=created.full_name, comment=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.catalogs.delete(name=new_catalog.name, force=True)
diff --git a/examples/workspace/secrets/create_scope_secrets.py b/examples/workspace/secrets/create_scope_secrets.py
index f881d0111..70ad4fc59 100755
--- a/examples/workspace/secrets/create_scope_secrets.py
+++ b/examples/workspace/secrets/create_scope_secrets.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-key_name = f'sdk-{time.time_ns()}'
+key_name = f"sdk-{time.time_ns()}"
 
-scope_name = f'sdk-{time.time_ns()}'
+scope_name = f"sdk-{time.time_ns()}"
 
 w.secrets.create_scope(scope=scope_name)
 
diff --git a/examples/workspace/secrets/list_acls_secrets.py b/examples/workspace/secrets/list_acls_secrets.py
index 97873f187..a6c62c4bf 100755
--- a/examples/workspace/secrets/list_acls_secrets.py
+++ b/examples/workspace/secrets/list_acls_secrets.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-key_name = f'sdk-{time.time_ns()}'
+key_name = f"sdk-{time.time_ns()}"
 
-scope_name = f'sdk-{time.time_ns()}'
+scope_name = f"sdk-{time.time_ns()}"
 
 w.secrets.create_scope(scope=scope_name)
 
diff --git a/examples/workspace/secrets/list_secrets_secrets.py b/examples/workspace/secrets/list_secrets_secrets.py
index 29acb4d1c..27c215c9c 100755
--- a/examples/workspace/secrets/list_secrets_secrets.py
+++ b/examples/workspace/secrets/list_secrets_secrets.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-key_name = f'sdk-{time.time_ns()}'
+key_name = f"sdk-{time.time_ns()}"
 
-scope_name = f'sdk-{time.time_ns()}'
+scope_name = f"sdk-{time.time_ns()}"
 
 w.secrets.create_scope(scope=scope_name)
 
diff --git a/examples/workspace/secrets/put_acl_secrets.py b/examples/workspace/secrets/put_acl_secrets.py
index ffc83fa57..11807e2ab 100755
--- a/examples/workspace/secrets/put_acl_secrets.py
+++ b/examples/workspace/secrets/put_acl_secrets.py
@@ -5,15 +5,19 @@
 
 w = WorkspaceClient()
 
-key_name = f'sdk-{time.time_ns()}'
+key_name = f"sdk-{time.time_ns()}"
 
-group = w.groups.create(display_name=f'sdk-{time.time_ns()}')
+group = w.groups.create(display_name=f"sdk-{time.time_ns()}")
 
-scope_name = f'sdk-{time.time_ns()}'
+scope_name = f"sdk-{time.time_ns()}"
 
 w.secrets.create_scope(scope=scope_name)
 
-w.secrets.put_acl(scope=scope_name, permission=workspace.AclPermission.MANAGE, principal=group.display_name)
+w.secrets.put_acl(
+    scope=scope_name,
+    permission=workspace.AclPermission.MANAGE,
+    principal=group.display_name,
+)
 
 # cleanup
 w.groups.delete(id=group.id)
diff --git a/examples/workspace/secrets/put_secret_secrets.py b/examples/workspace/secrets/put_secret_secrets.py
index 233e31740..47b2c6f39 100755
--- a/examples/workspace/secrets/put_secret_secrets.py
+++ b/examples/workspace/secrets/put_secret_secrets.py
@@ -4,13 +4,13 @@
 
 w = WorkspaceClient()
 
-key_name = f'sdk-{time.time_ns()}'
+key_name = f"sdk-{time.time_ns()}"
 
-scope_name = f'sdk-{time.time_ns()}'
+scope_name = f"sdk-{time.time_ns()}"
 
 w.secrets.create_scope(scope=scope_name)
 
-w.secrets.put_secret(scope=scope_name, key=key_name, string_value=f'sdk-{time.time_ns()}')
+w.secrets.put_secret(scope=scope_name, key=key_name, string_value=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.secrets.delete_secret(scope=scope_name, key=key_name)
diff --git a/examples/workspace/service_principals/create_create_obo_token_on_aws.py b/examples/workspace/service_principals/create_create_obo_token_on_aws.py
index 9a8ba9b5e..6d7e1dbc0 100755
--- a/examples/workspace/service_principals/create_create_obo_token_on_aws.py
+++ b/examples/workspace/service_principals/create_create_obo_token_on_aws.py
@@ -7,8 +7,10 @@
 
 groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest())
 
-spn = w.service_principals.create(display_name=f'sdk-{time.time_ns()}',
-                                  groups=[iam.ComplexValue(value=groups["admins"])])
+spn = w.service_principals.create(
+    display_name=f"sdk-{time.time_ns()}",
+    groups=[iam.ComplexValue(value=groups["admins"])],
+)
 
 # cleanup
 w.service_principals.delete(id=spn.id)
diff --git a/examples/workspace/service_principals/create_service_principals_on_aws.py b/examples/workspace/service_principals/create_service_principals_on_aws.py
index 8d0923cfd..f2fa023c5 100755
--- a/examples/workspace/service_principals/create_service_principals_on_aws.py
+++ b/examples/workspace/service_principals/create_service_principals_on_aws.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.service_principals.delete(id=created.id)
diff --git a/examples/workspace/service_principals/get_service_principals_on_aws.py b/examples/workspace/service_principals/get_service_principals_on_aws.py
index 28cd35a21..13588fc68 100755
--- a/examples/workspace/service_principals/get_service_principals_on_aws.py
+++ b/examples/workspace/service_principals/get_service_principals_on_aws.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
 by_id = w.service_principals.get(id=created.id)
 
diff --git a/examples/workspace/service_principals/patch_service_principals_on_aws.py b/examples/workspace/service_principals/patch_service_principals_on_aws.py
index 7f11cd222..2a6c95aca 100755
--- a/examples/workspace/service_principals/patch_service_principals_on_aws.py
+++ b/examples/workspace/service_principals/patch_service_principals_on_aws.py
@@ -5,13 +5,15 @@
 
 w = WorkspaceClient()
 
-created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
 by_id = w.service_principals.get(id=created.id)
 
-w.service_principals.patch(id=by_id.id,
-                           operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
-                           schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
+w.service_principals.patch(
+    id=by_id.id,
+    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
+    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
+)
 
 # cleanup
 w.service_principals.delete(id=created.id)
diff --git a/examples/workspace/service_principals/update_service_principals_on_aws.py b/examples/workspace/service_principals/update_service_principals_on_aws.py
index 9c9c0a953..dfd0632da 100755
--- a/examples/workspace/service_principals/update_service_principals_on_aws.py
+++ b/examples/workspace/service_principals/update_service_principals_on_aws.py
@@ -5,11 +5,13 @@
 
 w = WorkspaceClient()
 
-created = w.service_principals.create(display_name=f'sdk-{time.time_ns()}')
+created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}")
 
-w.service_principals.update(id=created.id,
-                            display_name=f'sdk-{time.time_ns()}',
-                            roles=[iam.ComplexValue(value="xyz")])
+w.service_principals.update(
+    id=created.id,
+    display_name=f"sdk-{time.time_ns()}",
+    roles=[iam.ComplexValue(value="xyz")],
+)
 
 # cleanup
 w.service_principals.delete(id=created.id)
diff --git a/examples/workspace/shares/create_shares.py b/examples/workspace/shares/create_shares.py
index ae491e561..df37ed035 100755
--- a/examples/workspace/shares/create_shares.py
+++ b/examples/workspace/shares/create_shares.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created_share = w.shares.create(name=f'sdk-{time.time_ns()}')
+created_share = w.shares.create(name=f"sdk-{time.time_ns()}")
 
 # cleanup
 w.shares.delete(name=created_share.name)
diff --git a/examples/workspace/shares/get_shares.py b/examples/workspace/shares/get_shares.py
index a010bc753..bb466b6f3 100755
--- a/examples/workspace/shares/get_shares.py
+++ b/examples/workspace/shares/get_shares.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created_share = w.shares.create(name=f'sdk-{time.time_ns()}')
+created_share = w.shares.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.shares.get(name=created_share.name)
 
diff --git a/examples/workspace/shares/update_shares.py b/examples/workspace/shares/update_shares.py
index 7662cb0b2..335082377 100755
--- a/examples/workspace/shares/update_shares.py
+++ b/examples/workspace/shares/update_shares.py
@@ -6,29 +6,36 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 _ = w.statement_execution.execute(
     warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
     catalog=created_catalog.name,
     schema=created_schema.name,
-    statement="CREATE TABLE %s TBLPROPERTIES (delta.enableDeletionVectors=false) AS SELECT 2+2 as four" %
-    (table_name)).result()
-
-table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name)
-
-created_share = w.shares.create(name=f'sdk-{time.time_ns()}')
-
-_ = w.shares.update(name=created_share.name,
-                    updates=[
-                        sharing.SharedDataObjectUpdate(action=sharing.SharedDataObjectUpdateAction.ADD,
-                                                       data_object=sharing.SharedDataObject(
-                                                           name=table_full_name, data_object_type="TABLE"))
-                    ])
+    statement="CREATE TABLE %s TBLPROPERTIES (delta.enableDeletionVectors=false) AS SELECT 2+2 as four" % (table_name),
+).result()
+
+table_full_name = "%s.%s.%s" % (
+    created_catalog.name,
+    created_schema.name,
+    table_name,
+)
+
+created_share = w.shares.create(name=f"sdk-{time.time_ns()}")
+
+_ = w.shares.update(
+    name=created_share.name,
+    updates=[
+        sharing.SharedDataObjectUpdate(
+            action=sharing.SharedDataObjectUpdateAction.ADD,
+            data_object=sharing.SharedDataObject(name=table_full_name, data_object_type="TABLE"),
+        )
+    ],
+)
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/statement_execution/execute_shares.py b/examples/workspace/statement_execution/execute_shares.py
index 8085a6a08..889932c12 100755
--- a/examples/workspace/statement_execution/execute_shares.py
+++ b/examples/workspace/statement_execution/execute_shares.py
@@ -5,18 +5,18 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 _ = w.statement_execution.execute(
     warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
     catalog=created_catalog.name,
     schema=created_schema.name,
-    statement="CREATE TABLE %s TBLPROPERTIES (delta.enableDeletionVectors=false) AS SELECT 2+2 as four" %
-    (table_name)).result()
+    statement="CREATE TABLE %s TBLPROPERTIES (delta.enableDeletionVectors=false) AS SELECT 2+2 as four" % (table_name),
+).result()
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/statement_execution/execute_tables.py b/examples/workspace/statement_execution/execute_tables.py
index 96e8b8d2e..3f02905ad 100755
--- a/examples/workspace/statement_execution/execute_tables.py
+++ b/examples/workspace/statement_execution/execute_tables.py
@@ -5,16 +5,18 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-_ = w.statement_execution.execute(warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
-                                  catalog=created_catalog.name,
-                                  schema=created_schema.name,
-                                  statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result()
+_ = w.statement_execution.execute(
+    warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
+    catalog=created_catalog.name,
+    schema=created_schema.name,
+    statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name),
+).result()
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/storage_credentials/create_external_locations.py b/examples/workspace/storage_credentials/create_external_locations.py
index 4c9773506..96c58d9a7 100755
--- a/examples/workspace/storage_credentials/create_external_locations.py
+++ b/examples/workspace/storage_credentials/create_external_locations.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(delete=credential.name)
diff --git a/examples/workspace/storage_credentials/create_external_locations_on_aws.py b/examples/workspace/storage_credentials/create_external_locations_on_aws.py
index 487466003..ad53bbbec 100755
--- a/examples/workspace/storage_credentials/create_external_locations_on_aws.py
+++ b/examples/workspace/storage_credentials/create_external_locations_on_aws.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(name=credential.name)
diff --git a/examples/workspace/storage_credentials/create_storage_credentials.py b/examples/workspace/storage_credentials/create_storage_credentials.py
index d39ad0027..4c00ed68b 100755
--- a/examples/workspace/storage_credentials/create_storage_credentials.py
+++ b/examples/workspace/storage_credentials/create_storage_credentials.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(delete=created.name)
diff --git a/examples/workspace/storage_credentials/create_storage_credentials_on_aws.py b/examples/workspace/storage_credentials/create_storage_credentials_on_aws.py
index b2af34c7e..a6a0c676f 100755
--- a/examples/workspace/storage_credentials/create_storage_credentials_on_aws.py
+++ b/examples/workspace/storage_credentials/create_storage_credentials_on_aws.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(name=created.name)
diff --git a/examples/workspace/storage_credentials/create_volumes.py b/examples/workspace/storage_credentials/create_volumes.py
index bf7bd42f1..cecfd326a 100755
--- a/examples/workspace/storage_credentials/create_volumes.py
+++ b/examples/workspace/storage_credentials/create_volumes.py
@@ -7,9 +7,10 @@
 w = WorkspaceClient()
 
 storage_credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-    comment="created via SDK")
+    comment="created via SDK",
+)
 
 # cleanup
 w.storage_credentials.delete(name=storage_credential.name)
diff --git a/examples/workspace/storage_credentials/get_storage_credentials.py b/examples/workspace/storage_credentials/get_storage_credentials.py
index 036feee66..f16abb2d9 100755
--- a/examples/workspace/storage_credentials/get_storage_credentials.py
+++ b/examples/workspace/storage_credentials/get_storage_credentials.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 by_name = w.storage_credentials.get(get=created.name)
 
diff --git a/examples/workspace/storage_credentials/get_storage_credentials_on_aws.py b/examples/workspace/storage_credentials/get_storage_credentials_on_aws.py
index 9f9a8380a..2446d1767 100755
--- a/examples/workspace/storage_credentials/get_storage_credentials_on_aws.py
+++ b/examples/workspace/storage_credentials/get_storage_credentials_on_aws.py
@@ -7,8 +7,9 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 by_name = w.storage_credentials.get(name=created.name)
 
diff --git a/examples/workspace/storage_credentials/update_storage_credentials.py b/examples/workspace/storage_credentials/update_storage_credentials.py
index ced265c53..942eb055e 100755
--- a/examples/workspace/storage_credentials/update_storage_credentials.py
+++ b/examples/workspace/storage_credentials/update_storage_credentials.py
@@ -7,13 +7,15 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 _ = w.storage_credentials.update(
     name=created.name,
-    comment=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    comment=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(delete=created.name)
diff --git a/examples/workspace/storage_credentials/update_storage_credentials_on_aws.py b/examples/workspace/storage_credentials/update_storage_credentials_on_aws.py
index bd5dcd829..1bb906164 100755
--- a/examples/workspace/storage_credentials/update_storage_credentials_on_aws.py
+++ b/examples/workspace/storage_credentials/update_storage_credentials_on_aws.py
@@ -7,13 +7,15 @@
 w = WorkspaceClient()
 
 created = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    name=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 _ = w.storage_credentials.update(
     name=created.name,
-    comment=f'sdk-{time.time_ns()}',
-    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]))
+    comment=f"sdk-{time.time_ns()}",
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+)
 
 # cleanup
 w.storage_credentials.delete(name=created.name)
diff --git a/examples/workspace/tables/get_tables.py b/examples/workspace/tables/get_tables.py
index 7c81faf6d..25028b0ab 100755
--- a/examples/workspace/tables/get_tables.py
+++ b/examples/workspace/tables/get_tables.py
@@ -5,18 +5,24 @@
 
 w = WorkspaceClient()
 
-table_name = f'sdk-{time.time_ns()}'
+table_name = f"sdk-{time.time_ns()}"
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-_ = w.statement_execution.execute(warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
-                                  catalog=created_catalog.name,
-                                  schema=created_schema.name,
-                                  statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name)).result()
+_ = w.statement_execution.execute(
+    warehouse_id=os.environ["TEST_DEFAULT_WAREHOUSE_ID"],
+    catalog=created_catalog.name,
+    schema=created_schema.name,
+    statement="CREATE TABLE %s AS SELECT 2+2 as four" % (table_name),
+).result()
 
-table_full_name = "%s.%s.%s" % (created_catalog.name, created_schema.name, table_name)
+table_full_name = "%s.%s.%s" % (
+    created_catalog.name,
+    created_schema.name,
+    table_name,
+)
 
 created_table = w.tables.get(full_name=table_full_name)
 
diff --git a/examples/workspace/tables/list_summaries_tables.py b/examples/workspace/tables/list_summaries_tables.py
index d3e64fd0a..87abeebf5 100755
--- a/examples/workspace/tables/list_summaries_tables.py
+++ b/examples/workspace/tables/list_summaries_tables.py
@@ -4,12 +4,11 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
-summaries = w.tables.list_summaries(catalog_name=created_catalog.name,
-                                    schema_name_pattern=created_schema.name)
+summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)
 
 # cleanup
 w.schemas.delete(full_name=created_schema.full_name)
diff --git a/examples/workspace/tables/list_tables.py b/examples/workspace/tables/list_tables.py
index 6c14faa12..2f41d8829 100755
--- a/examples/workspace/tables/list_tables.py
+++ b/examples/workspace/tables/list_tables.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)
 
diff --git a/examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py b/examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py
index 881827b8f..408af62cf 100755
--- a/examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py
+++ b/examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py
@@ -7,8 +7,10 @@
 
 groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest())
 
-spn = w.service_principals.create(display_name=f'sdk-{time.time_ns()}',
-                                  groups=[iam.ComplexValue(value=groups["admins"])])
+spn = w.service_principals.create(
+    display_name=f"sdk-{time.time_ns()}",
+    groups=[iam.ComplexValue(value=groups["admins"])],
+)
 
 obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60)
 
diff --git a/examples/workspace/token_management/get_create_obo_token_on_aws.py b/examples/workspace/token_management/get_create_obo_token_on_aws.py
index d47d60a30..d90640ea2 100755
--- a/examples/workspace/token_management/get_create_obo_token_on_aws.py
+++ b/examples/workspace/token_management/get_create_obo_token_on_aws.py
@@ -7,8 +7,10 @@
 
 groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest())
 
-spn = w.service_principals.create(display_name=f'sdk-{time.time_ns()}',
-                                  groups=[iam.ComplexValue(value=groups["admins"])])
+spn = w.service_principals.create(
+    display_name=f"sdk-{time.time_ns()}",
+    groups=[iam.ComplexValue(value=groups["admins"])],
+)
 
 obo = w.token_management.create_obo_token(application_id=spn.application_id, lifetime_seconds=60)
 
diff --git a/examples/workspace/tokens/create_tokens.py b/examples/workspace/tokens/create_tokens.py
index e7545d2ed..e218fd9af 100755
--- a/examples/workspace/tokens/create_tokens.py
+++ b/examples/workspace/tokens/create_tokens.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300)
+token = w.tokens.create(comment=f"sdk-{time.time_ns()}", lifetime_seconds=300)
 
 # cleanup
 w.tokens.delete(token_id=token.token_info.token_id)
diff --git a/examples/workspace/tokens/get_tokens.py b/examples/workspace/tokens/get_tokens.py
index fade24f21..91a8f88d0 100755
--- a/examples/workspace/tokens/get_tokens.py
+++ b/examples/workspace/tokens/get_tokens.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-token = w.tokens.create(comment=f'sdk-{time.time_ns()}', lifetime_seconds=300)
+token = w.tokens.create(comment=f"sdk-{time.time_ns()}", lifetime_seconds=300)
 
 by_name = w.tokens.get(comment=token.token_info.comment)
 
diff --git a/examples/workspace/users/create_clusters_api_integration.py b/examples/workspace/users/create_clusters_api_integration.py
index 020c18744..66da6c5e4 100755
--- a/examples/workspace/users/create_clusters_api_integration.py
+++ b/examples/workspace/users/create_clusters_api_integration.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com')
+other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com")
 
 # cleanup
 w.users.delete(id=other_owner.id)
diff --git a/examples/workspace/users/create_users.py b/examples/workspace/users/create_users.py
index 4a348cf32..5113f511d 100755
--- a/examples/workspace/users/create_users.py
+++ b/examples/workspace/users/create_users.py
@@ -4,4 +4,7 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
diff --git a/examples/workspace/users/create_workspace_users.py b/examples/workspace/users/create_workspace_users.py
index 4a348cf32..5113f511d 100755
--- a/examples/workspace/users/create_workspace_users.py
+++ b/examples/workspace/users/create_workspace_users.py
@@ -4,4 +4,7 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
diff --git a/examples/workspace/users/delete_clusters_api_integration.py b/examples/workspace/users/delete_clusters_api_integration.py
index fecdb0364..39dc250a4 100755
--- a/examples/workspace/users/delete_clusters_api_integration.py
+++ b/examples/workspace/users/delete_clusters_api_integration.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-other_owner = w.users.create(user_name=f'sdk-{time.time_ns()}@example.com')
+other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com")
 
 w.users.delete(id=other_owner.id)
diff --git a/examples/workspace/users/delete_users.py b/examples/workspace/users/delete_users.py
index 81004b1a1..b9316147d 100755
--- a/examples/workspace/users/delete_users.py
+++ b/examples/workspace/users/delete_users.py
@@ -4,6 +4,9 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 w.users.delete(id=user.id)
diff --git a/examples/workspace/users/delete_workspace_users.py b/examples/workspace/users/delete_workspace_users.py
index 81004b1a1..b9316147d 100755
--- a/examples/workspace/users/delete_workspace_users.py
+++ b/examples/workspace/users/delete_workspace_users.py
@@ -4,6 +4,9 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 w.users.delete(id=user.id)
diff --git a/examples/workspace/users/get_users.py b/examples/workspace/users/get_users.py
index 55919ff73..d7bc5d67a 100755
--- a/examples/workspace/users/get_users.py
+++ b/examples/workspace/users/get_users.py
@@ -4,6 +4,9 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 fetch = w.users.get(id=user.id)
diff --git a/examples/workspace/users/get_workspace_users.py b/examples/workspace/users/get_workspace_users.py
index 55919ff73..d7bc5d67a 100755
--- a/examples/workspace/users/get_workspace_users.py
+++ b/examples/workspace/users/get_workspace_users.py
@@ -4,6 +4,9 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 fetch = w.users.get(id=user.id)
diff --git a/examples/workspace/users/list_users.py b/examples/workspace/users/list_users.py
index fbc477cbd..a4eccfd31 100755
--- a/examples/workspace/users/list_users.py
+++ b/examples/workspace/users/list_users.py
@@ -3,6 +3,8 @@
 
 w = WorkspaceClient()
 
-all_users = w.users.list(attributes="id,userName",
-                         sort_by="userName",
-                         sort_order=iam.ListSortOrder.DESCENDING)
+all_users = w.users.list(
+    attributes="id,userName",
+    sort_by="userName",
+    sort_order=iam.ListSortOrder.DESCENDING,
+)
diff --git a/examples/workspace/users/list_workspace_users.py b/examples/workspace/users/list_workspace_users.py
index fbc477cbd..a4eccfd31 100755
--- a/examples/workspace/users/list_workspace_users.py
+++ b/examples/workspace/users/list_workspace_users.py
@@ -3,6 +3,8 @@
 
 w = WorkspaceClient()
 
-all_users = w.users.list(attributes="id,userName",
-                         sort_by="userName",
-                         sort_order=iam.ListSortOrder.DESCENDING)
+all_users = w.users.list(
+    attributes="id,userName",
+    sort_by="userName",
+    sort_order=iam.ListSortOrder.DESCENDING,
+)
diff --git a/examples/workspace/users/patch_workspace_users.py b/examples/workspace/users/patch_workspace_users.py
index b5618b2f5..68632e24a 100755
--- a/examples/workspace/users/patch_workspace_users.py
+++ b/examples/workspace/users/patch_workspace_users.py
@@ -5,8 +5,13 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
-w.users.patch(id=user.id,
-              operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
-              schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
+w.users.patch(
+    id=user.id,
+    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
+    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
+)
diff --git a/examples/workspace/users/update_workspace_users.py b/examples/workspace/users/update_workspace_users.py
index 843d3cbc1..d4af7f7af 100755
--- a/examples/workspace/users/update_workspace_users.py
+++ b/examples/workspace/users/update_workspace_users.py
@@ -4,6 +4,9 @@
 
 w = WorkspaceClient()
 
-user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
+user = w.users.create(
+    display_name=f"sdk-{time.time_ns()}",
+    user_name=f"sdk-{time.time_ns()}@example.com",
+)
 
 w.users.update(id=user.id, user_name=user.user_name, active=True)
diff --git a/examples/workspace/volumes/create_volumes.py b/examples/workspace/volumes/create_volumes.py
index b6814e93f..33148f2fd 100755
--- a/examples/workspace/volumes/create_volumes.py
+++ b/examples/workspace/volumes/create_volumes.py
@@ -7,25 +7,29 @@
 w = WorkspaceClient()
 
 storage_credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-    comment="created via SDK")
-
-external_location = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                                credential_name=storage_credential.name,
-                                                comment="created via SDK",
-                                                url="s3://" + os.environ["TEST_BUCKET"] + "/" +
-                                                f'sdk-{time.time_ns()}')
-
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
-
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
-
-created_volume = w.volumes.create(catalog_name=created_catalog.name,
-                                  schema_name=created_schema.name,
-                                  name=f'sdk-{time.time_ns()}',
-                                  storage_location=external_location.url,
-                                  volume_type=catalog.VolumeType.EXTERNAL)
+    comment="created via SDK",
+)
+
+external_location = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=storage_credential.name,
+    comment="created via SDK",
+    url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+)
+
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
+
+created_volume = w.volumes.create(
+    catalog_name=created_catalog.name,
+    schema_name=created_schema.name,
+    name=f"sdk-{time.time_ns()}",
+    storage_location=external_location.url,
+    volume_type=catalog.VolumeType.EXTERNAL,
+)
 
 # cleanup
 w.storage_credentials.delete(name=storage_credential.name)
diff --git a/examples/workspace/volumes/list_volumes.py b/examples/workspace/volumes/list_volumes.py
index 32e39912b..b82ce040a 100755
--- a/examples/workspace/volumes/list_volumes.py
+++ b/examples/workspace/volumes/list_volumes.py
@@ -4,9 +4,9 @@
 
 w = WorkspaceClient()
 
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
 
 all_volumes = w.volumes.list(catalog_name=created_catalog.name, schema_name=created_schema.name)
 
diff --git a/examples/workspace/volumes/read_volumes.py b/examples/workspace/volumes/read_volumes.py
index 585ace8ab..b7c2452eb 100755
--- a/examples/workspace/volumes/read_volumes.py
+++ b/examples/workspace/volumes/read_volumes.py
@@ -7,25 +7,29 @@
 w = WorkspaceClient()
 
 storage_credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-    comment="created via SDK")
-
-external_location = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                                credential_name=storage_credential.name,
-                                                comment="created via SDK",
-                                                url="s3://" + os.environ["TEST_BUCKET"] + "/" +
-                                                f'sdk-{time.time_ns()}')
-
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
-
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
-
-created_volume = w.volumes.create(catalog_name=created_catalog.name,
-                                  schema_name=created_schema.name,
-                                  name=f'sdk-{time.time_ns()}',
-                                  storage_location=external_location.url,
-                                  volume_type=catalog.VolumeType.EXTERNAL)
+    comment="created via SDK",
+)
+
+external_location = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=storage_credential.name,
+    comment="created via SDK",
+    url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+)
+
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
+
+created_volume = w.volumes.create(
+    catalog_name=created_catalog.name,
+    schema_name=created_schema.name,
+    name=f"sdk-{time.time_ns()}",
+    storage_location=external_location.url,
+    volume_type=catalog.VolumeType.EXTERNAL,
+)
 
 loaded_volume = w.volumes.read(name=created_volume.full_name)
 
diff --git a/examples/workspace/volumes/update_volumes.py b/examples/workspace/volumes/update_volumes.py
index 68f96282d..ed045d5b8 100755
--- a/examples/workspace/volumes/update_volumes.py
+++ b/examples/workspace/volumes/update_volumes.py
@@ -7,25 +7,29 @@
 w = WorkspaceClient()
 
 storage_credential = w.storage_credentials.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
-    comment="created via SDK")
-
-external_location = w.external_locations.create(name=f'sdk-{time.time_ns()}',
-                                                credential_name=storage_credential.name,
-                                                comment="created via SDK",
-                                                url="s3://" + os.environ["TEST_BUCKET"] + "/" +
-                                                f'sdk-{time.time_ns()}')
-
-created_catalog = w.catalogs.create(name=f'sdk-{time.time_ns()}')
-
-created_schema = w.schemas.create(name=f'sdk-{time.time_ns()}', catalog_name=created_catalog.name)
-
-created_volume = w.volumes.create(catalog_name=created_catalog.name,
-                                  schema_name=created_schema.name,
-                                  name=f'sdk-{time.time_ns()}',
-                                  storage_location=external_location.url,
-                                  volume_type=catalog.VolumeType.EXTERNAL)
+    comment="created via SDK",
+)
+
+external_location = w.external_locations.create(
+    name=f"sdk-{time.time_ns()}",
+    credential_name=storage_credential.name,
+    comment="created via SDK",
+    url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
+)
+
+created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+
+created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)
+
+created_volume = w.volumes.create(
+    catalog_name=created_catalog.name,
+    schema_name=created_schema.name,
+    name=f"sdk-{time.time_ns()}",
+    storage_location=external_location.url,
+    volume_type=catalog.VolumeType.EXTERNAL,
+)
 
 loaded_volume = w.volumes.read(name=created_volume.full_name)
 
diff --git a/examples/workspace/warehouses/create_sql_warehouses.py b/examples/workspace/warehouses/create_sql_warehouses.py
index f01b9d5f9..0f69e498d 100755
--- a/examples/workspace/warehouses/create_sql_warehouses.py
+++ b/examples/workspace/warehouses/create_sql_warehouses.py
@@ -6,13 +6,14 @@
 w = WorkspaceClient()
 
 created = w.warehouses.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     cluster_size="2X-Small",
     max_num_clusters=1,
     auto_stop_mins=10,
     tags=sql.EndpointTags(
-        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
-                     ])).result()
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")]
+    ),
+).result()
 
 # cleanup
 w.warehouses.delete(id=created.id)
diff --git a/examples/workspace/warehouses/edit_sql_warehouses.py b/examples/workspace/warehouses/edit_sql_warehouses.py
index acf06035a..0cd7cfaba 100755
--- a/examples/workspace/warehouses/edit_sql_warehouses.py
+++ b/examples/workspace/warehouses/edit_sql_warehouses.py
@@ -6,19 +6,22 @@
 w = WorkspaceClient()
 
 created = w.warehouses.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     cluster_size="2X-Small",
     max_num_clusters=1,
     auto_stop_mins=10,
     tags=sql.EndpointTags(
-        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
-                     ])).result()
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")]
+    ),
+).result()
 
-_ = w.warehouses.edit(id=created.id,
-                      name=f'sdk-{time.time_ns()}',
-                      cluster_size="2X-Small",
-                      max_num_clusters=1,
-                      auto_stop_mins=10)
+_ = w.warehouses.edit(
+    id=created.id,
+    name=f"sdk-{time.time_ns()}",
+    cluster_size="2X-Small",
+    max_num_clusters=1,
+    auto_stop_mins=10,
+)
 
 # cleanup
 w.warehouses.delete(id=created.id)
diff --git a/examples/workspace/warehouses/get_sql_warehouses.py b/examples/workspace/warehouses/get_sql_warehouses.py
index 9f8184ab7..0e6ce1b3d 100755
--- a/examples/workspace/warehouses/get_sql_warehouses.py
+++ b/examples/workspace/warehouses/get_sql_warehouses.py
@@ -6,13 +6,14 @@
 w = WorkspaceClient()
 
 created = w.warehouses.create(
-    name=f'sdk-{time.time_ns()}',
+    name=f"sdk-{time.time_ns()}",
     cluster_size="2X-Small",
     max_num_clusters=1,
     auto_stop_mins=10,
     tags=sql.EndpointTags(
-        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")
-                     ])).result()
+        custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com")]
+    ),
+).result()
 
 wh = w.warehouses.get(id=created.id)
 
diff --git a/examples/workspace/workspace/download_file.py b/examples/workspace/workspace/download_file.py
index 6ec1a7fa4..dd4a6625c 100644
--- a/examples/workspace/workspace/download_file.py
+++ b/examples/workspace/workspace/download_file.py
@@ -6,11 +6,11 @@
 
 w = WorkspaceClient()
 
-py_file = f'/Users/{w.current_user.me().user_name}/file-{time.time_ns()}.py'
+py_file = f"/Users/{w.current_user.me().user_name}/file-{time.time_ns()}.py"
 
-w.workspace.upload(py_file, io.BytesIO(b'print(1)'), format=ImportFormat.AUTO)
+w.workspace.upload(py_file, io.BytesIO(b"print(1)"), format=ImportFormat.AUTO)
 with w.workspace.download(py_file) as f:
     content = f.read()
-    assert content == b'print(1)'
+    assert content == b"print(1)"
 
-w.workspace.delete(py_file)
\ No newline at end of file
+w.workspace.delete(py_file)
diff --git a/examples/workspace/workspace/export__workspace_integration.py b/examples/workspace/workspace/export__workspace_integration.py
index d7fbde332..31e667def 100755
--- a/examples/workspace/workspace/export__workspace_integration.py
+++ b/examples/workspace/workspace/export__workspace_integration.py
@@ -5,6 +5,6 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook)
diff --git a/examples/workspace/workspace/export_workspace_integration.py b/examples/workspace/workspace/export_workspace_integration.py
index efd497889..60c662c4b 100755
--- a/examples/workspace/workspace/export_workspace_integration.py
+++ b/examples/workspace/workspace/export_workspace_integration.py
@@ -5,6 +5,6 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)
diff --git a/examples/workspace/workspace/get_status_generic_permissions.py b/examples/workspace/workspace/get_status_generic_permissions.py
index 44723bcbe..6f94a0183 100755
--- a/examples/workspace/workspace/get_status_generic_permissions.py
+++ b/examples/workspace/workspace/get_status_generic_permissions.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 obj = w.workspace.get_status(path=notebook_path)
diff --git a/examples/workspace/workspace/get_status_workspace_integration.py b/examples/workspace/workspace/get_status_workspace_integration.py
index 3ccd249c7..c0f692947 100755
--- a/examples/workspace/workspace/get_status_workspace_integration.py
+++ b/examples/workspace/workspace/get_status_workspace_integration.py
@@ -4,6 +4,6 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 get_status_response = w.workspace.get_status(path=notebook)
diff --git a/examples/workspace/workspace/import__generic_permissions.py b/examples/workspace/workspace/import__generic_permissions.py
index 08604d10f..50bd0683b 100755
--- a/examples/workspace/workspace/import__generic_permissions.py
+++ b/examples/workspace/workspace/import__generic_permissions.py
@@ -6,11 +6,17 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook_path,
-                    overwrite=true_,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(("""print(1)
-""").encode()).decode())
+w.workspace.import_(
+    path=notebook_path,
+    overwrite=true_,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(
+        (
+            """print(1)
+"""
+        ).encode()
+    ).decode(),
+)
diff --git a/examples/workspace/workspace/import__jobs_api_full_integration.py b/examples/workspace/workspace/import__jobs_api_full_integration.py
index e2cccdea4..e5fd4badc 100755
--- a/examples/workspace/workspace/import__jobs_api_full_integration.py
+++ b/examples/workspace/workspace/import__jobs_api_full_integration.py
@@ -6,13 +6,19 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook_path,
-                    overwrite=true_,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(("""import time
+w.workspace.import_(
+    path=notebook_path,
+    overwrite=true_,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(
+        (
+            """import time
 time.sleep(10)
 dbutils.notebook.exit('hello')
-""").encode()).decode())
+"""
+        ).encode()
+    ).decode(),
+)
diff --git a/examples/workspace/workspace/import__pipelines.py b/examples/workspace/workspace/import__pipelines.py
index 89765218c..d92698a64 100755
--- a/examples/workspace/workspace/import__pipelines.py
+++ b/examples/workspace/workspace/import__pipelines.py
@@ -6,10 +6,12 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.SQL,
-                    overwrite=true_,
-                    path=notebook_path)
+w.workspace.import_(
+    content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.SQL,
+    overwrite=true_,
+    path=notebook_path,
+)
diff --git a/examples/workspace/workspace/import__workspace_integration.py b/examples/workspace/workspace/import__workspace_integration.py
index 1c5fe58a9..94896a66d 100755
--- a/examples/workspace/workspace/import__workspace_integration.py
+++ b/examples/workspace/workspace/import__workspace_integration.py
@@ -6,11 +6,12 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(
-                        ("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
-                    overwrite=true_)
+w.workspace.import_(
+    path=notebook,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
+    overwrite=true_,
+)
diff --git a/examples/workspace/workspace/import_generic_permissions.py b/examples/workspace/workspace/import_generic_permissions.py
index 885f0f3b0..b6de20027 100755
--- a/examples/workspace/workspace/import_generic_permissions.py
+++ b/examples/workspace/workspace/import_generic_permissions.py
@@ -6,11 +6,17 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook_path,
-                    overwrite=True,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(("""print(1)
-""").encode()).decode())
+w.workspace.import_(
+    path=notebook_path,
+    overwrite=True,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(
+        (
+            """print(1)
+"""
+        ).encode()
+    ).decode(),
+)
diff --git a/examples/workspace/workspace/import_jobs_api_full_integration.py b/examples/workspace/workspace/import_jobs_api_full_integration.py
index c22159e23..048a8fa42 100755
--- a/examples/workspace/workspace/import_jobs_api_full_integration.py
+++ b/examples/workspace/workspace/import_jobs_api_full_integration.py
@@ -6,13 +6,19 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook_path,
-                    overwrite=True,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(("""import time
+w.workspace.import_(
+    path=notebook_path,
+    overwrite=True,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(
+        (
+            """import time
 time.sleep(10)
 dbutils.notebook.exit('hello')
-""").encode()).decode())
+"""
+        ).encode()
+    ).decode(),
+)
diff --git a/examples/workspace/workspace/import_pipelines.py b/examples/workspace/workspace/import_pipelines.py
index e0e989b4a..cb5150be7 100755
--- a/examples/workspace/workspace/import_pipelines.py
+++ b/examples/workspace/workspace/import_pipelines.py
@@ -6,10 +6,12 @@
 
 w = WorkspaceClient()
 
-notebook_path = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.SQL,
-                    overwrite=True,
-                    path=notebook_path)
+w.workspace.import_(
+    content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(),
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.SQL,
+    overwrite=True,
+    path=notebook_path,
+)
diff --git a/examples/workspace/workspace/import_workspace_integration.py b/examples/workspace/workspace/import_workspace_integration.py
index 81c7c41a8..7227adc69 100755
--- a/examples/workspace/workspace/import_workspace_integration.py
+++ b/examples/workspace/workspace/import_workspace_integration.py
@@ -6,11 +6,12 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
-w.workspace.import_(path=notebook,
-                    format=workspace.ImportFormat.SOURCE,
-                    language=workspace.Language.PYTHON,
-                    content=base64.b64encode(
-                        ("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
-                    overwrite=True)
+w.workspace.import_(
+    path=notebook,
+    format=workspace.ImportFormat.SOURCE,
+    language=workspace.Language.PYTHON,
+    content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
+    overwrite=True,
+)
diff --git a/examples/workspace/workspace/list_recursive.py b/examples/workspace/workspace/list_recursive.py
index 4cdd6e985..471f079f2 100644
--- a/examples/workspace/workspace/list_recursive.py
+++ b/examples/workspace/workspace/list_recursive.py
@@ -3,6 +3,6 @@
 w = WorkspaceClient()
 
 names = []
-for i in w.workspace.list(f'/Users/{w.current_user.me().user_name}', recursive=True):
+for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
     names.append(i.path)
-assert len(names) > 0
\ No newline at end of file
+assert len(names) > 0
diff --git a/examples/workspace/workspace/list_workspace_integration.py b/examples/workspace/workspace/list_workspace_integration.py
index 1e664310f..b312a8309 100755
--- a/examples/workspace/workspace/list_workspace_integration.py
+++ b/examples/workspace/workspace/list_workspace_integration.py
@@ -5,6 +5,6 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
+notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
 
 objects = w.workspace.list(path=os.path.dirname(notebook))
diff --git a/examples/workspace/workspace/upload_notebook.py b/examples/workspace/workspace/upload_notebook.py
index 94b23fe55..73c75abed 100644
--- a/examples/workspace/workspace/upload_notebook.py
+++ b/examples/workspace/workspace/upload_notebook.py
@@ -5,11 +5,11 @@
 
 w = WorkspaceClient()
 
-notebook = f'/Users/{w.current_user.me().user_name}/notebook-{time.time_ns()}.py'
+notebook = f"/Users/{w.current_user.me().user_name}/notebook-{time.time_ns()}.py"
 
-w.workspace.upload(notebook, io.BytesIO(b'print(1)'))
+w.workspace.upload(notebook, io.BytesIO(b"print(1)"))
 with w.workspace.download(notebook) as f:
     content = f.read()
-    assert content == b'# Databricks notebook source\nprint(1)'
+    assert content == b"# Databricks notebook source\nprint(1)"
 
-w.workspace.delete(notebook)
\ No newline at end of file
+w.workspace.delete(notebook)
diff --git a/examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py b/examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py
index 651ab75a7..dc9a6917a 100755
--- a/examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py
+++ b/examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py
@@ -4,7 +4,7 @@
 
 w = WorkspaceClient()
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 bindings = w.workspace_bindings.get(name=created.name)
 
diff --git a/examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py b/examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py
index 2737cab31..d10241a53 100755
--- a/examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py
+++ b/examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py
@@ -7,7 +7,7 @@
 
 this_workspace_id = os.environ["THIS_WORKSPACE_ID"]
 
-created = w.catalogs.create(name=f'sdk-{time.time_ns()}')
+created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
 
 _ = w.workspace_bindings.update(name=created.name, assign_workspaces=[this_workspace_id])
 
diff --git a/pyproject.toml b/pyproject.toml
index 92572a44d..d2236d499 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,11 +22,11 @@ classifiers = [
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
-    "Operating System :: OS Independent"
+    "Operating System :: OS Independent",
 ]
 dependencies = [
     "requests>=2.28.1,<3",
-    "google-auth~=2.0"
+    "google-auth~=2.0",
 ]
 
 [project.urls]
@@ -38,7 +38,7 @@ dev = [
     "pytest-cov",
     "pytest-xdist",
     "pytest-mock",
-    "yapf",
+    "black==23.10.0",
     "pycodestyle",
     "autoflake",
     "isort",
@@ -56,12 +56,12 @@ dev = [
 ]
 notebook = [
     "ipython>=8,<9",
-    "ipywidgets>=8,<9"
+    "ipywidgets>=8,<9",
 ]
 openai = [
     "openai",
     'langchain-openai; python_version > "3.7"',
-    "httpx"
+    "httpx",
 ]
 
 [tool.setuptools.dynamic]
@@ -72,3 +72,7 @@ include = ["databricks", "databricks.*"]
 
 [tool.setuptools.package-data]
 "databricks.sdk" = ["py.typed"]
+
+[tool.black]
+line-length = 120
+target-version = ['py37', 'py38', 'py39', 'py310', 'py311', 'py312', 'py313']
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 0adaaf3ba..2a1d9ab5e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,8 +13,9 @@ remove-unused-variables = true
 ignore = E126,E261,E501,E701,W293
 max-line-length = 120
 
-[yapf]
-based_on_style = pep8
+[black]
+line-length = 120
+target-version = ['py37', 'py38', 'py39', 'py310', 'py311', 'py312', 'py313']
 
 COLUMN_LIMIT = 110
 INDENT_WIDTH = 4
diff --git a/tests/conftest.py b/tests/conftest.py
index 0f415ecf1..9dddc16a2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -11,19 +11,20 @@
 from .integration.conftest import restorable_env  # type: ignore
 
 
-@credentials_strategy('noop', [])
+@credentials_strategy("noop", [])
 def noop_credentials(_: any):
     return lambda: {}
 
 
 @pytest.fixture
 def config():
-    return Config(host='http://localhost', credentials_strategy=noop_credentials)
+    return Config(host="http://localhost", credentials_strategy=noop_credentials)
 
 
 @pytest.fixture
 def w(config):
     from databricks.sdk import WorkspaceClient
+
     return WorkspaceClient(config=config)
 
 
@@ -39,11 +40,11 @@ def wrapper(*args, **kwargs):
             with pytest.raises(ValueError) as info:
                 func(*args, **kwargs)
             exception_str = str(info.value)
-            if platform.system() == 'Windows':
-                exception_str = exception_str.replace(__tests__ + '\\', '')
-                exception_str = exception_str.replace('\\', '/')
+            if platform.system() == "Windows":
+                exception_str = exception_str.replace(__tests__ + "\\", "")
+                exception_str = exception_str.replace("\\", "/")
             else:
-                exception_str = exception_str.replace(__tests__ + '/', '')
+                exception_str = exception_str.replace(__tests__ + "/", "")
             assert msg in exception_str
 
         return wrapper
@@ -61,22 +62,25 @@ def fake_fs():
         test_data_path = __tests__
         patcher.fs.add_real_directory(test_data_path)
 
-        yield patcher.fs # This will return a fake file system
+        yield patcher.fs  # This will return a fake file system
 
 
 def set_home(monkeypatch, path):
-    if platform.system() == 'Windows':
-        monkeypatch.setenv('USERPROFILE', __tests__ + path)
+    if platform.system() == "Windows":
+        monkeypatch.setenv("USERPROFILE", __tests__ + path)
     else:
-        monkeypatch.setenv('HOME', __tests__ + path)
+        monkeypatch.setenv("HOME", __tests__ + path)
 
 
 def set_az_path(monkeypatch):
-    if platform.system() == 'Windows':
-        monkeypatch.setenv('Path', __tests__ + "\\testdata\\windows\\")
-        monkeypatch.setenv('COMSPEC', 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe')
+    if platform.system() == "Windows":
+        monkeypatch.setenv("Path", __tests__ + "\\testdata\\windows\\")
+        monkeypatch.setenv(
+            "COMSPEC",
+            "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe",
+        )
     else:
-        monkeypatch.setenv('PATH', __tests__ + "/testdata:/bin")
+        monkeypatch.setenv("PATH", __tests__ + "/testdata:/bin")
 
 
 @pytest.fixture
@@ -84,9 +88,10 @@ def mock_tenant(requests_mock):
 
     def stub_tenant_request(host, tenant_id="test-tenant-id"):
         mock = requests_mock.get(
-            f'https://{host}/aad/auth',
+            f"https://{host}/aad/auth",
             status_code=302,
-            headers={'Location': f'https://login.microsoftonline.com/{tenant_id}/oauth2/authorize'})
+            headers={"Location": f"https://login.microsoftonline.com/{tenant_id}/oauth2/authorize"},
+        )
         return mock
 
     return stub_tenant_request
diff --git a/tests/fixture_server.py b/tests/fixture_server.py
index e15f9cf2d..97b45bcff 100644
--- a/tests/fixture_server.py
+++ b/tests/fixture_server.py
@@ -5,27 +5,33 @@
 
 
 @contextlib.contextmanager
-def http_fixture_server(handler: typing.Callable[[BaseHTTPRequestHandler], None]):
+def http_fixture_server(
+    handler: typing.Callable[[BaseHTTPRequestHandler], None],
+):
     from http.server import HTTPServer
     from threading import Thread
 
     class _handler(BaseHTTPRequestHandler):
 
-        def __init__(self, handler: typing.Callable[[BaseHTTPRequestHandler], None], *args):
+        def __init__(
+            self,
+            handler: typing.Callable[[BaseHTTPRequestHandler], None],
+            *args,
+        ):
             self._handler = handler
             super().__init__(*args)
 
         def __getattr__(self, item):
-            if 'do_' != item[0:3]:
-                raise AttributeError(f'method {item} not found')
+            if "do_" != item[0:3]:
+                raise AttributeError(f"method {item} not found")
             return functools.partial(self._handler, self)
 
     handler_factory = functools.partial(_handler, handler)
-    srv = HTTPServer(('localhost', 0), handler_factory)
+    srv = HTTPServer(("localhost", 0), handler_factory)
     t = Thread(target=srv.serve_forever)
     try:
         t.daemon = True
         t.start()
-        yield 'http://{0}:{1}'.format(*srv.server_address)
+        yield "http://{0}:{1}".format(*srv.server_address)
     finally:
         srv.shutdown()
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index e9c5430dd..95dd10fd1 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -13,45 +13,54 @@
 
 def pytest_addoption(parser):
     # make logging sensible and readable.
-    parser.addini('log_format', '...', 'string', '%(asctime)s [%(name)s][%(levelname)s] %(message)s')
-    parser.addini('log_date_format', '...', 'string', '%H:%M')
+    parser.addini(
+        "log_format",
+        "...",
+        "string",
+        "%(asctime)s [%(name)s][%(levelname)s] %(message)s",
+    )
+    parser.addini("log_date_format", "...", "string", "%H:%M")
 
 
 def pytest_configure(config):
     # disable urllib3, as it adds more noise
-    logger = logging.getLogger('urllib3.connectionpool')
+    logger = logging.getLogger("urllib3.connectionpool")
     logger.propagate = False
 
-    config.addinivalue_line('markers',
-                            'integration: marks tests as those requiring a real Databricks backend')
-    config.addinivalue_line('markers',
-                            'benchmark: marks tests as benchmarks which should not be run by default')
+    config.addinivalue_line(
+        "markers",
+        "integration: marks tests as those requiring a real Databricks backend",
+    )
+    config.addinivalue_line(
+        "markers",
+        "benchmark: marks tests as benchmarks which should not be run by default",
+    )
 
 
 def pytest_collection_modifyitems(items):
     # safer to refer to fixture fns instead of strings
     client_fixtures = [x.__name__ for x in [a, w, ucws]]
     for item in items:
-        current_fixtures = getattr(item, 'fixturenames', ())
+        current_fixtures = getattr(item, "fixturenames", ())
         for requires_client in client_fixtures:
             if requires_client in current_fixtures:
-                item.add_marker('integration')
+                item.add_marker("integration")
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def random():
     import random
 
     def inner(k=16) -> str:
         charset = string.ascii_uppercase + string.ascii_lowercase + string.digits
-        return ''.join(random.choices(charset, k=int(k)))
+        return "".join(random.choices(charset, k=int(k)))
 
     return inner
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def a(env_or_skip) -> AccountClient:
-    _load_debug_env_if_runs_from_ide('account')
+    _load_debug_env_if_runs_from_ide("account")
     env_or_skip("CLOUD_ENV")
     account_client = AccountClient()
     if not account_client.config.is_account_client:
@@ -59,66 +68,66 @@ def a(env_or_skip) -> AccountClient:
     return account_client
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def ucacct(env_or_skip) -> AccountClient:
-    _load_debug_env_if_runs_from_ide('ucacct')
+    _load_debug_env_if_runs_from_ide("ucacct")
     env_or_skip("CLOUD_ENV")
     account_client = AccountClient()
     if not account_client.config.is_account_client:
         pytest.skip("not Databricks Account client")
-    if 'TEST_METASTORE_ID' not in os.environ:
+    if "TEST_METASTORE_ID" not in os.environ:
         pytest.skip("not in Unity Catalog Workspace test env")
     return account_client
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def w(env_or_skip) -> WorkspaceClient:
-    _load_debug_env_if_runs_from_ide('workspace')
+    _load_debug_env_if_runs_from_ide("workspace")
     env_or_skip("CLOUD_ENV")
-    if 'DATABRICKS_ACCOUNT_ID' in os.environ:
+    if "DATABRICKS_ACCOUNT_ID" in os.environ:
         pytest.skip("Skipping workspace test on account level")
     return WorkspaceClient()
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def ucws(env_or_skip) -> WorkspaceClient:
-    _load_debug_env_if_runs_from_ide('ucws')
+    _load_debug_env_if_runs_from_ide("ucws")
     env_or_skip("CLOUD_ENV")
-    if 'DATABRICKS_ACCOUNT_ID' in os.environ:
+    if "DATABRICKS_ACCOUNT_ID" in os.environ:
         pytest.skip("Skipping workspace test on account level")
-    if 'TEST_METASTORE_ID' not in os.environ:
+    if "TEST_METASTORE_ID" not in os.environ:
         pytest.skip("not in Unity Catalog Workspace test env")
     return WorkspaceClient()
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def env_or_skip():
 
     def inner(var: str) -> str:
         if var not in os.environ:
-            pytest.skip(f'Environment variable {var} is missing')
+            pytest.skip(f"Environment variable {var} is missing")
         return os.environ[var]
 
     return inner
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def schema(ucws, random):
-    schema = ucws.schemas.create('dbfs-' + random(), 'main')
+    schema = ucws.schemas.create("dbfs-" + random(), "main")
     yield schema
     ucws.schemas.delete(schema.full_name)
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def volume(ucws, schema):
-    volume = ucws.volumes.create('main', schema.name, 'dbfs-test', VolumeType.MANAGED)
-    yield '/Volumes/' + volume.full_name.replace(".", "/")
+    volume = ucws.volumes.create("main", schema.name, "dbfs-test", VolumeType.MANAGED)
+    yield "/Volumes/" + volume.full_name.replace(".", "/")
     ucws.volumes.delete(volume.full_name)
 
 
 @pytest.fixture()
 def workspace_dir(w, random):
-    directory = f'/Users/{w.current_user.me().user_name}/dir-{random(12)}'
+    directory = f"/Users/{w.current_user.me().user_name}/dir-{random(12)}"
     w.workspace.mkdirs(directory)
     yield directory
     w.workspace.delete(directory, recursive=True)
@@ -127,23 +136,27 @@ def workspace_dir(w, random):
 def _load_debug_env_if_runs_from_ide(key) -> bool:
     if not _is_in_debug():
         return False
-    conf_file = pathlib.Path.home() / '.databricks/debug-env.json'
-    with conf_file.open('r') as f:
+    conf_file = pathlib.Path.home() / ".databricks/debug-env.json"
+    with conf_file.open("r") as f:
         conf = json.load(f)
         if key not in conf:
-            raise KeyError(f'{key} not found in ~/.databricks/debug-env.json')
+            raise KeyError(f"{key} not found in ~/.databricks/debug-env.json")
         for k, v in conf[key].items():
             os.environ[k] = v
     return True
 
 
 def _is_in_debug() -> bool:
-    return os.path.basename(sys.argv[0]) in ['_jb_pytest_runner.py', 'testlauncher.py', ]
+    return os.path.basename(sys.argv[0]) in [
+        "_jb_pytest_runner.py",
+        "testlauncher.py",
+    ]
 
 
 @pytest.fixture(scope="function")
 def restorable_env():
     import os
+
     current_env = os.environ.copy()
     yield
     for k, v in os.environ.items():
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 41b901c9a..c6904599f 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -22,18 +22,21 @@
 def fresh_wheel_file(tmp_path) -> Path:
     this_file = Path(__file__)
     project_root = this_file.parent.parent.parent.absolute()
-    build_root = tmp_path / 'databricks-sdk-py'
+    build_root = tmp_path / "databricks-sdk-py"
     shutil.copytree(project_root, build_root)
     try:
-        completed_process = subprocess.run([sys.executable, '-m', 'build'],
-                                           capture_output=True,
-                                           cwd=build_root)
+        completed_process = subprocess.run(
+            [sys.executable, "-m", "build"],
+            capture_output=True,
+            cwd=build_root,
+        )
         if completed_process.returncode != 0:
             raise RuntimeError(completed_process.stderr)
 
         from databricks.sdk.version import __version__
-        filename = f'databricks_sdk-{__version__}-py3-none-any.whl'
-        wheel_file = build_root / 'dist' / filename
+
+        filename = f"databricks_sdk-{__version__}-py3-none-any.whl"
+        wheel_file = build_root / "dist" / filename
 
         return wheel_file
     except subprocess.CalledProcessError as e:
@@ -42,45 +45,55 @@ def fresh_wheel_file(tmp_path) -> Path:
 
 @pytest.mark.parametrize("mode", [DataSecurityMode.SINGLE_USER, DataSecurityMode.USER_ISOLATION])
 def test_runtime_auth_from_interactive_on_uc(ucws, fresh_wheel_file, env_or_skip, random, mode):
-    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
+    instance_pool_id = env_or_skip("TEST_INSTANCE_POOL_ID")
     latest = ucws.clusters.select_spark_version(latest=True)
 
     my_user = ucws.current_user.me().user_name
 
-    workspace_location = f'/Users/{my_user}/wheels/{random(10)}'
+    workspace_location = f"/Users/{my_user}/wheels/{random(10)}"
     ucws.workspace.mkdirs(workspace_location)
 
-    wsfs_wheel = f'{workspace_location}/{fresh_wheel_file.name}'
-    with fresh_wheel_file.open('rb') as f:
+    wsfs_wheel = f"{workspace_location}/{fresh_wheel_file.name}"
+    with fresh_wheel_file.open("rb") as f:
         ucws.workspace.upload(wsfs_wheel, f, format=ImportFormat.AUTO)
 
     from databricks.sdk.service.compute import Language
-    interactive_cluster = ucws.clusters.create(cluster_name=f'native-auth-on-{mode.name}',
-                                               spark_version=latest,
-                                               instance_pool_id=instance_pool_id,
-                                               autotermination_minutes=10,
-                                               num_workers=1,
-                                               data_security_mode=mode).result()
-    ctx = ucws.command_execution.create(cluster_id=interactive_cluster.cluster_id,
-                                        language=Language.PYTHON).result()
-    run = partial(ucws.command_execution.execute,
-                  cluster_id=interactive_cluster.cluster_id,
-                  context_id=ctx.id,
-                  language=Language.PYTHON)
+
+    interactive_cluster = ucws.clusters.create(
+        cluster_name=f"native-auth-on-{mode.name}",
+        spark_version=latest,
+        instance_pool_id=instance_pool_id,
+        autotermination_minutes=10,
+        num_workers=1,
+        data_security_mode=mode,
+    ).result()
+    ctx = ucws.command_execution.create(cluster_id=interactive_cluster.cluster_id, language=Language.PYTHON).result()
+    run = partial(
+        ucws.command_execution.execute,
+        cluster_id=interactive_cluster.cluster_id,
+        context_id=ctx.id,
+        language=Language.PYTHON,
+    )
     try:
         res = run(command=f"%pip install /Workspace{wsfs_wheel}\ndbutils.library.restartPython()").result()
         results = res.results
         if results.result_type != ResultType.TEXT:
-            msg = f'({mode}) unexpected result type: {results.result_type}: {results.summary}\n{results.cause}'
+            msg = f"({mode}) unexpected result type: {results.result_type}: {results.summary}\n{results.cause}"
             raise RuntimeError(msg)
 
-        res = run(command="\n".join([
-            'from databricks.sdk import WorkspaceClient', 'w = WorkspaceClient()', 'me = w.current_user.me()',
-            'print(me.user_name)'
-        ])).result()
-        assert res.results.result_type == ResultType.TEXT, f'unexpected result type: {res.results.result_type}'
-
-        assert my_user == res.results.data, f'unexpected user: {res.results.data}'
+        res = run(
+            command="\n".join(
+                [
+                    "from databricks.sdk import WorkspaceClient",
+                    "w = WorkspaceClient()",
+                    "me = w.current_user.me()",
+                    "print(me.user_name)",
+                ]
+            )
+        ).result()
+        assert res.results.result_type == ResultType.TEXT, f"unexpected result type: {res.results.result_type}"
+
+        assert my_user == res.results.data, f"unexpected user: {res.results.data}"
     finally:
         ucws.clusters.permanent_delete(interactive_cluster.cluster_id)
 
@@ -88,17 +101,18 @@ def test_runtime_auth_from_interactive_on_uc(ucws, fresh_wheel_file, env_or_skip
 def _get_lts_versions(w) -> typing.List[SparkVersion]:
     v = w.clusters.spark_versions()
     lts_runtimes = [
-        x for x in v.versions
-        if 'LTS' in x.name and '-ml' not in x.key and '-photon' not in x.key and '-aarch64' not in x.key
+        x
+        for x in v.versions
+        if "LTS" in x.name and "-ml" not in x.key and "-photon" not in x.key and "-aarch64" not in x.key
     ]
     return lts_runtimes
 
 
 def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, random, volume):
-    dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split('.')[0]) >= 15]
+    dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split(".")[0]) >= 15]
 
-    volume_wheel = f'{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
-    with fresh_wheel_file.open('rb') as f:
+    volume_wheel = f"{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
+    with fresh_wheel_file.open("rb") as f:
         ucws.files.upload(volume_wheel, f)
 
     lib = Library(whl=volume_wheel)
@@ -107,29 +121,37 @@ def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, ran
 
 def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random):
     # Library installation from DBFS is not supported past DBR 14.3
-    dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split('.')[0]) < 15]
+    dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split(".")[0]) < 15]
 
-    dbfs_wheel = f'/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
-    with fresh_wheel_file.open('rb') as f:
+    dbfs_wheel = f"/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
+    with fresh_wheel_file.open("rb") as f:
         w.dbfs.upload(dbfs_wheel, f)
 
-    lib = Library(whl=f'dbfs:{dbfs_wheel}')
+    lib = Library(whl=f"dbfs:{dbfs_wheel}")
     return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib)
 
 
 def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library):
-    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
+    instance_pool_id = env_or_skip("TEST_INSTANCE_POOL_ID")
 
     my_name = w.current_user.me().user_name
-    notebook_path = f'/Users/{my_name}/notebook-native-auth'
-    notebook_content = io.BytesIO(b'''
+    notebook_path = f"/Users/{my_name}/notebook-native-auth"
+    notebook_content = io.BytesIO(
+        b"""
 from databricks.sdk import WorkspaceClient
 w = WorkspaceClient()
 me = w.current_user.me()
-print(me.user_name)''')
+print(me.user_name)"""
+    )
 
     from databricks.sdk.service.workspace import Language
-    w.workspace.upload(notebook_path, notebook_content, language=Language.PYTHON, overwrite=True)
+
+    w.workspace.upload(
+        notebook_path,
+        notebook_content,
+        language=Language.PYTHON,
+        overwrite=True,
+    )
 
     tasks = []
     for v in dbr_versions:
@@ -141,14 +163,16 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib
                 num_workers=1,
                 instance_pool_id=instance_pool_id,
                 # GCP uses "custom" data security mode by default, which does not support UC.
-                data_security_mode=DataSecurityMode.SINGLE_USER),
-            libraries=[library])
+                data_security_mode=DataSecurityMode.SINGLE_USER,
+            ),
+            libraries=[library],
+        )
         tasks.append(t)
 
-    waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
+    waiter = w.jobs.submit(run_name=f"Runtime Native Auth {random(10)}", tasks=tasks)
     run = waiter.result()
     for task_key, output in _task_outputs(w, run).items():
-        assert my_name in output, f'{task_key} does not work with notebook native auth'
+        assert my_name in output, f"{task_key} does not work with notebook native auth"
 
 
 def _task_outputs(w, run):
@@ -156,21 +180,21 @@ def _task_outputs(w, run):
 
     task_outputs = {}
     for task_run in run.tasks:
-        output = ''
+        output = ""
         run_output = w.jobs.export_run(task_run.run_id)
         for view in run_output.views:
             if view.type != ViewType.NOTEBOOK:
                 continue
             for b64 in notebook_model_re.findall(view.content):
                 url_encoded: bytes = base64.b64decode(b64)
-                json_encoded = urllib.parse.unquote(url_encoded.decode('utf-8'))
+                json_encoded = urllib.parse.unquote(url_encoded.decode("utf-8"))
                 notebook_model = json.loads(json_encoded)
-                for command in notebook_model['commands']:
-                    results_data = command['results']['data']
+                for command in notebook_model["commands"]:
+                    results_data = command["results"]["data"]
                     if isinstance(results_data, str):
                         output += results_data
                     else:
                         for data in results_data:
-                            output += data['data']
+                            output += data["data"]
         task_outputs[task_run.task_key] = output
     return task_outputs
diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py
index eab9c4713..4e13d6854 100644
--- a/tests/integration/test_client.py
+++ b/tests/integration/test_client.py
@@ -7,12 +7,12 @@ def test_get_workspace_client(ucacct, env_or_skip):
 
 
 def test_get_workspace_id(ucws, env_or_skip):
-    ws_id = int(env_or_skip('THIS_WORKSPACE_ID'))
+    ws_id = int(env_or_skip("THIS_WORKSPACE_ID"))
     assert ucws.get_workspace_id() == ws_id
 
 
 def test_creating_ws_client_from_ac_client_does_not_override_config(ucacct, env_or_skip):
-    ws_id = env_or_skip('TEST_WORKSPACE_ID')
+    ws_id = env_or_skip("TEST_WORKSPACE_ID")
     ws = ucacct.workspaces.get(ws_id)
     w = ucacct.get_workspace_client(ws)
     me = w.current_user.me()
diff --git a/tests/integration/test_clusters.py b/tests/integration/test_clusters.py
index f3a9c6c89..dd388d2ed 100644
--- a/tests/integration/test_clusters.py
+++ b/tests/integration/test_clusters.py
@@ -31,17 +31,19 @@ def test_ensure_cluster_is_running(w, env_or_skip):
 
 
 def test_create_cluster(w, env_or_skip, random):
-    info = w.clusters.create(cluster_name=f'databricks-sdk-py-{random(8)}',
-                             spark_version=w.clusters.select_spark_version(long_term_support=True),
-                             instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
-                             autotermination_minutes=10,
-                             num_workers=1).result(timeout=timedelta(minutes=20))
-    logging.info(f'Created: {info}')
+    info = w.clusters.create(
+        cluster_name=f"databricks-sdk-py-{random(8)}",
+        spark_version=w.clusters.select_spark_version(long_term_support=True),
+        instance_pool_id=env_or_skip("TEST_INSTANCE_POOL_ID"),
+        autotermination_minutes=10,
+        num_workers=1,
+    ).result(timeout=timedelta(minutes=20))
+    logging.info(f"Created: {info}")
 
 
 def test_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
-        w.clusters.get('123__non_existing__')
+        w.clusters.get("123__non_existing__")
     err = exc_info.value
-    assert 'Cluster 123__non_existing__ does not exist' in str(err)
-    assert 'INVALID_PARAMETER_VALUE' == err.error_code
+    assert "Cluster 123__non_existing__ does not exist" in str(err)
+    assert "INVALID_PARAMETER_VALUE" == err.error_code
diff --git a/tests/integration/test_commands.py b/tests/integration/test_commands.py
index 38156e6ce..e60302818 100644
--- a/tests/integration/test_commands.py
+++ b/tests/integration/test_commands.py
@@ -5,7 +5,7 @@
 
 def test_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
-        w.command_execution.execute(cluster_id='__non_existing__')
+        w.command_execution.execute(cluster_id="__non_existing__")
     err = exc_info.value
-    assert 'requirement failed: missing contextId' in str(err)
+    assert "requirement failed: missing contextId" in str(err)
     assert err.error_code is None
diff --git a/tests/integration/test_dbconnect.py b/tests/integration/test_dbconnect.py
index 59d327ae1..ecc82855b 100644
--- a/tests/integration/test_dbconnect.py
+++ b/tests/integration/test_dbconnect.py
@@ -1,12 +1,15 @@
 import pytest
 
-DBCONNECT_DBR_CLIENT = {"13.3": "13.3.3", "14.3": "14.3.1", }
+DBCONNECT_DBR_CLIENT = {
+    "13.3": "13.3.3",
+    "14.3": "14.3.1",
+}
 
 
 def reload_modules(name: str):
     """
-    Reloads the specified module. This is useful when testing Databricks Connect, since both 
-    the `databricks.connect` and `databricks.sdk.runtime` modules are stateful, and we need 
+    Reloads the specified module. This is useful when testing Databricks Connect, since both
+    the `databricks.connect` and `databricks.sdk.runtime` modules are stateful, and we need
     to reload these modules to reset the state cache between test runs.
     """
 
@@ -26,14 +29,17 @@ def reload_modules(name: str):
 @pytest.fixture(params=list(DBCONNECT_DBR_CLIENT.keys()))
 def setup_dbconnect_test(request, env_or_skip, restorable_env):
     dbr = request.param
-    assert dbr in DBCONNECT_DBR_CLIENT, f"Unsupported Databricks Runtime version {dbr}. Please update DBCONNECT_DBR_CLIENT."
+    assert (
+        dbr in DBCONNECT_DBR_CLIENT
+    ), f"Unsupported Databricks Runtime version {dbr}. Please update DBCONNECT_DBR_CLIENT."
 
     import os
-    os.environ["DATABRICKS_CLUSTER_ID"] = env_or_skip(
-        f"TEST_DBR_{dbr.replace('.', '_')}_DBCONNECT_CLUSTER_ID")
+
+    os.environ["DATABRICKS_CLUSTER_ID"] = env_or_skip(f"TEST_DBR_{dbr.replace('.', '_')}_DBCONNECT_CLUSTER_ID")
 
     import subprocess
     import sys
+
     lib = f"databricks-connect=={DBCONNECT_DBR_CLIENT[dbr]}"
     subprocess.check_call([sys.executable, "-m", "pip", "install", lib])
 
diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py
index e486f2282..feafac00a 100644
--- a/tests/integration/test_dbutils.py
+++ b/tests/integration/test_dbutils.py
@@ -11,7 +11,7 @@
 def test_rest_dbfs_ls(w, env_or_skip):
     from databricks.sdk.runtime import dbutils
 
-    x = dbutils.fs.ls('/')
+    x = dbutils.fs.ls("/")
 
     assert len(x) > 1
 
@@ -24,11 +24,11 @@ def test_proxy_dbfs_mounts(w, env_or_skip):
     assert len(x) > 1
 
 
-@pytest.fixture(params=['dbfs', 'volumes'])
+@pytest.fixture(params=["dbfs", "volumes"])
 def fs_and_base_path(request, ucws, volume):
-    if request.param == 'dbfs':
+    if request.param == "dbfs":
         fs = ucws.dbutils.fs
-        base_path = '/tmp'
+        base_path = "/tmp"
     else:
         fs = ucws.dbutils.fs
         base_path = volume
@@ -54,8 +54,8 @@ def test_large_put(fs_and_base_path):
 def test_put_local_path(w, random, tmp_path):
     to_write = random(1024 * 1024 * 2)
     tmp_path = tmp_path / "tmp_file"
-    w.dbutils.fs.put(f'file:{tmp_path}', to_write, True)
-    assert w.dbutils.fs.head(f'file:{tmp_path}', 1024 * 1024 * 2) == to_write
+    w.dbutils.fs.put(f"file:{tmp_path}", to_write, True)
+    assert w.dbutils.fs.head(f"file:{tmp_path}", 1024 * 1024 * 2) == to_write
 
 
 def test_cp_file(fs_and_base_path, random):
@@ -120,7 +120,7 @@ def test_mv_dir(fs_and_base_path, random):
     fs.put(path + "/file1", "test1", True)
     fs.put(path + "/file2", "test2", True)
     # DBFS can do recursive mv as a single API call, but Volumes cannot
-    kw = {'recurse': True} if '/Volumes' in path else {}
+    kw = {"recurse": True} if "/Volumes" in path else {}
     fs.mv(path, path + "_moved", **kw)
     output = fs.ls(path + "_moved")
     assert len(output) == 2
@@ -135,7 +135,7 @@ def test_mv_local_to_remote(fs_and_base_path, random, tmp_path):
     path = base_path + "/dbc_qa_file-" + random()
     with open(tmp_path / "test", "w") as f:
         f.write("test")
-    fs.mv('file:' + str(tmp_path / "test"), path)
+    fs.mv("file:" + str(tmp_path / "test"), path)
     output = fs.head(path)
     assert output == "test"
     assert os.listdir(tmp_path) == []
@@ -145,7 +145,7 @@ def test_mv_remote_to_local(fs_and_base_path, random, tmp_path):
     fs, base_path = fs_and_base_path
     path = base_path + "/dbc_qa_file-" + random()
     fs.put(path, "test", True)
-    fs.mv(path, 'file:' + str(tmp_path / "test"))
+    fs.mv(path, "file:" + str(tmp_path / "test"))
     with open(tmp_path / "test", "r") as f:
         output = f.read()
     assert output == "test"
@@ -176,19 +176,21 @@ def test_rm_dir(fs_and_base_path, random):
 
 
 def test_secrets(w, random):
-    random_scope = f'scope-{random()}'
-    key_for_string = f'string-{random()}'
-    key_for_bytes = f'bytes-{random()}'
-    random_value = f'SECRET-{random()}'
+    random_scope = f"scope-{random()}"
+    key_for_string = f"string-{random()}"
+    key_for_bytes = f"bytes-{random()}"
+    random_value = f"SECRET-{random()}"
 
-    logger = logging.getLogger('foo')
-    logger.info(f'Before loading secret: {random_value}')
+    logger = logging.getLogger("foo")
+    logger.info(f"Before loading secret: {random_value}")
 
     w.secrets.create_scope(random_scope)
     w.secrets.put_secret(random_scope, key_for_string, string_value=random_value)
-    w.secrets.put_secret(random_scope,
-                         key_for_bytes,
-                         bytes_value=base64.b64encode(random_value.encode()).decode())
+    w.secrets.put_secret(
+        random_scope,
+        key_for_bytes,
+        bytes_value=base64.b64encode(random_value.encode()).decode(),
+    )
 
     from databricks.sdk.runtime import dbutils
 
@@ -201,12 +203,12 @@ def test_secrets(w, random):
         try:
             all_secrets[key] = dbutils.secrets.get(random_scope, key)
         except DatabricksError as e:
-            if e.error_code == 'BAD_REQUEST':
-                pytest.skip('dbconnect is not enabled on this workspace')
+            if e.error_code == "BAD_REQUEST":
+                pytest.skip("dbconnect is not enabled on this workspace")
             raise e
 
-    logger.info(f'After loading secret: {random_value}')
-    logging.getLogger('databricks.sdk').info(f'After loading secret: {random_value}')
+    logger.info(f"After loading secret: {random_value}")
+    logging.getLogger("databricks.sdk").info(f"After loading secret: {random_value}")
 
     assert all_secrets[key_for_string] == random_value
     assert all_secrets[key_for_bytes] == random_value
diff --git a/tests/integration/test_deployment.py b/tests/integration/test_deployment.py
index 007751b58..2071645d2 100644
--- a/tests/integration/test_deployment.py
+++ b/tests/integration/test_deployment.py
@@ -5,6 +5,6 @@
 
 def test_workspaces(a):
     if a.config.is_azure:
-        pytest.skip('not available on Azure')
+        pytest.skip("not available on Azure")
     for w in a.workspaces.list():
-        logging.info(f'Found workspace: {w.workspace_name}')
\ No newline at end of file
+        logging.info(f"Found workspace: {w.workspace_name}")
diff --git a/tests/integration/test_external_browser.py b/tests/integration/test_external_browser.py
index 332a89a17..883069217 100644
--- a/tests/integration/test_external_browser.py
+++ b/tests/integration/test_external_browser.py
@@ -5,36 +5,42 @@
 from .conftest import _load_debug_env_if_runs_from_ide
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def env(env_or_skip):
-    if not _load_debug_env_if_runs_from_ide('workspace'):
-        pytest.skip('runnable only on dev machines')
+    if not _load_debug_env_if_runs_from_ide("workspace"):
+        pytest.skip("runnable only on dev machines")
     return env_or_skip
 
 
 def test_pkce_app(env):
-    w = WorkspaceClient(host=env('DATABRICKS_HOST'),
-                        client_id=env('TEST_PKCE_APP_CLIENT_ID'),
-                        auth_type='external-browser')
+    w = WorkspaceClient(
+        host=env("DATABRICKS_HOST"),
+        client_id=env("TEST_PKCE_APP_CLIENT_ID"),
+        auth_type="external-browser",
+    )
     clusters = w.clusters.list()
     for cl in clusters:
-        print(f' - {cl.cluster_name} is {cl.state}')
+        print(f" - {cl.cluster_name} is {cl.state}")
 
 
 def test_public_app(env):
-    w = WorkspaceClient(host=env('DATABRICKS_HOST'),
-                        client_id=env('TEST_PUBLIC_APP_CLIENT_ID'),
-                        auth_type='external-browser')
+    w = WorkspaceClient(
+        host=env("DATABRICKS_HOST"),
+        client_id=env("TEST_PUBLIC_APP_CLIENT_ID"),
+        auth_type="external-browser",
+    )
     clusters = w.clusters.list()
     for cl in clusters:
-        print(f' - {cl.cluster_name} is {cl.state}')
+        print(f" - {cl.cluster_name} is {cl.state}")
 
 
 def test_private_app(env):
-    w = WorkspaceClient(host=env('DATABRICKS_HOST'),
-                        client_id=env('TEST_PRIVATE_APP_CLIENT_ID'),
-                        client_secret=env('TEST_PRIVATE_APP_CLIENT_SECRET'),
-                        auth_type='external-browser')
+    w = WorkspaceClient(
+        host=env("DATABRICKS_HOST"),
+        client_id=env("TEST_PRIVATE_APP_CLIENT_ID"),
+        client_secret=env("TEST_PRIVATE_APP_CLIENT_SECRET"),
+        auth_type="external-browser",
+    )
     clusters = w.clusters.list()
     for cl in clusters:
-        print(f' - {cl.cluster_name} is {cl.state}')
+        print(f" - {cl.cluster_name} is {cl.state}")
diff --git a/tests/integration/test_files.py b/tests/integration/test_files.py
index 7b9ede556..932f85550 100644
--- a/tests/integration/test_files.py
+++ b/tests/integration/test_files.py
@@ -12,22 +12,22 @@
 
 
 def test_local_io(random):
-    if platform.system() == 'Windows':
-        dummy_file = f'C:\\Windows\\Temp\\{random()}'
+    if platform.system() == "Windows":
+        dummy_file = f"C:\\Windows\\Temp\\{random()}"
     else:
-        dummy_file = f'/tmp/{random()}'
+        dummy_file = f"/tmp/{random()}"
     to_write = random(1024 * 1024 * 2.5).encode()
-    with open(dummy_file, 'wb') as f:
+    with open(dummy_file, "wb") as f:
         written = f.write(to_write)
         assert len(to_write) == written
 
-    f = open(dummy_file, 'rb')
+    f = open(dummy_file, "rb")
     assert f.read() == to_write
     f.close()
 
 
 def test_dbfs_io(w, random):
-    dummy_file = f'/tmp/{random()}'
+    dummy_file = f"/tmp/{random()}"
     to_write = random(1024 * 1024 * 1.5).encode()
     with w.dbfs.open(dummy_file, write=True) as f:
         written = f.write(to_write)
@@ -62,131 +62,131 @@ def inner(root: str, recursive=False) -> List[str]:
 
 
 def test_recursive_listing(w, random, junk, ls):
-    root = f'/tmp/{random()}'
-    junk(f'{root}/01')
-    junk(f'{root}/a/02')
-    junk(f'{root}/a/b/03')
+    root = f"/tmp/{random()}"
+    junk(f"{root}/01")
+    junk(f"{root}/a/02")
+    junk(f"{root}/a/b/03")
 
-    assert ['/01', '/a'] == ls(root)
-    assert ['/01', '/a/02', '/a/b/03'] == ls(root, recursive=True)
+    assert ["/01", "/a"] == ls(root)
+    assert ["/01", "/a/02", "/a/b/03"] == ls(root, recursive=True)
 
     w.dbfs.delete(root, recursive=True)
 
 
 def test_cp_dbfs_folder_to_folder_non_recursive(w, random, junk, ls):
-    root = f'/tmp/{random()}'
-    junk(f'{root}/01')
-    junk(f'{root}/a/02')
-    junk(f'{root}/a/b/03')
-    new_root = f'/tmp/{random()}'
+    root = f"/tmp/{random()}"
+    junk(f"{root}/01")
+    junk(f"{root}/a/02")
+    junk(f"{root}/a/b/03")
+    new_root = f"/tmp/{random()}"
 
     w.dbfs.copy(root, new_root)
 
-    assert ['/01'] == ls(new_root, recursive=True)
+    assert ["/01"] == ls(new_root, recursive=True)
 
 
 def test_cp_dbfs_folder_to_folder_recursive(w, random, junk, ls):
-    root = f'/tmp/{random()}'
-    junk(f'{root}/01')
-    junk(f'{root}/a/02')
-    junk(f'{root}/a/b/03')
-    new_root = f'/tmp/{random()}'
+    root = f"/tmp/{random()}"
+    junk(f"{root}/01")
+    junk(f"{root}/a/02")
+    junk(f"{root}/a/b/03")
+    new_root = f"/tmp/{random()}"
 
     w.dbfs.copy(root, new_root, recursive=True, overwrite=True)
 
-    assert ['/01', '/a/02', '/a/b/03'] == ls(new_root, recursive=True)
+    assert ["/01", "/a/02", "/a/b/03"] == ls(new_root, recursive=True)
 
 
 def test_cp_dbfs_folder_to_existing_folder_recursive(w, random, junk, ls):
-    root = f'/tmp/{random()}'
-    junk(f'{root}/01')
-    junk(f'{root}/a/02')
-    junk(f'{root}/a/b/03')
-    new_root = f'/tmp/{random()}'
+    root = f"/tmp/{random()}"
+    junk(f"{root}/01")
+    junk(f"{root}/a/02")
+    junk(f"{root}/a/b/03")
+    new_root = f"/tmp/{random()}"
 
     w.dbfs.mkdirs(new_root)
     w.dbfs.copy(root, new_root, recursive=True, overwrite=True)
 
-    base = root.split('/')[-1]
-    assert [f'/{base}/01', f'/{base}/a/02', f'/{base}/a/b/03'] == ls(new_root, recursive=True)
+    base = root.split("/")[-1]
+    assert [f"/{base}/01", f"/{base}/a/02", f"/{base}/a/b/03"] == ls(new_root, recursive=True)
 
 
 def test_cp_dbfs_file_to_non_existing_location(w, random, junk):
-    root = f'/tmp/{random()}'
-    payload = junk(f'{root}/01')
-    copy_destination = f'{root}/{random()}'
+    root = f"/tmp/{random()}"
+    payload = junk(f"{root}/01")
+    copy_destination = f"{root}/{random()}"
 
-    w.dbfs.copy(f'{root}/01', copy_destination)
+    w.dbfs.copy(f"{root}/01", copy_destination)
 
     with w.dbfs.open(copy_destination, read=True) as f:
         assert f.read() == payload
 
 
 def test_cp_dbfs_file_to_existing_folder(w, random, junk):
-    root = f'/tmp/{random()}'
-    payload = junk(f'{root}/01')
-    w.dbfs.mkdirs(f'{root}/02')
-    w.dbfs.copy(f'{root}/01', f'{root}/02')
+    root = f"/tmp/{random()}"
+    payload = junk(f"{root}/01")
+    w.dbfs.mkdirs(f"{root}/02")
+    w.dbfs.copy(f"{root}/01", f"{root}/02")
 
-    with w.dbfs.open(f'{root}/02/01', read=True) as f:
+    with w.dbfs.open(f"{root}/02/01", read=True) as f:
         assert f.read() == payload
 
 
 def test_cp_dbfs_file_to_existing_location(w, random, junk):
-    root = f'/tmp/{random()}'
-    junk(f'{root}/01')
-    junk(f'{root}/02')
+    root = f"/tmp/{random()}"
+    junk(f"{root}/01")
+    junk(f"{root}/02")
     with pytest.raises(DatabricksError) as ei:
-        w.dbfs.copy(f'{root}/01', f'{root}/02')
-    assert 'A file or directory already exists' in str(ei.value)
+        w.dbfs.copy(f"{root}/01", f"{root}/02")
+    assert "A file or directory already exists" in str(ei.value)
 
 
 def test_cp_dbfs_file_to_existing_location_with_overwrite(w, random, junk):
-    root = f'/tmp/{random()}'
-    payload = junk(f'{root}/01')
-    junk(f'{root}/02')
+    root = f"/tmp/{random()}"
+    payload = junk(f"{root}/01")
+    junk(f"{root}/02")
 
-    w.dbfs.copy(f'{root}/01', f'{root}/02', overwrite=True)
+    w.dbfs.copy(f"{root}/01", f"{root}/02", overwrite=True)
 
-    with w.dbfs.open(f'{root}/02', read=True) as f:
+    with w.dbfs.open(f"{root}/02", read=True) as f:
         assert f.read() == payload
 
 
 def test_move_within_dbfs(w, random, junk):
-    root = f'/tmp/{random()}'
-    payload = junk(f'{root}/01')
+    root = f"/tmp/{random()}"
+    payload = junk(f"{root}/01")
 
-    w.dbfs.move_(f'{root}/01', f'{root}/02')
+    w.dbfs.move_(f"{root}/01", f"{root}/02")
 
-    assert w.dbfs.exists(f'{root}/01') is False
-    with w.dbfs.open(f'{root}/02', read=True) as f:
+    assert w.dbfs.exists(f"{root}/01") is False
+    with w.dbfs.open(f"{root}/02", read=True) as f:
         assert f.read() == payload
 
 
 def test_move_from_dbfs_to_local(w, random, junk, tmp_path):
-    root = pathlib.Path(f'/tmp/{random()}')
-    payload_01 = junk(f'{root}/01')
-    payload_02 = junk(f'{root}/a/02')
-    payload_03 = junk(f'{root}/a/b/03')
+    root = pathlib.Path(f"/tmp/{random()}")
+    payload_01 = junk(f"{root}/01")
+    payload_02 = junk(f"{root}/a/02")
+    payload_03 = junk(f"{root}/a/b/03")
 
-    w.dbfs.move_(root, f'file:{tmp_path}', recursive=True)
+    w.dbfs.move_(root, f"file:{tmp_path}", recursive=True)
 
     assert w.dbfs.exists(root) is False
-    with (tmp_path / root.name / '01').open('rb') as f:
+    with (tmp_path / root.name / "01").open("rb") as f:
         assert f.read() == payload_01
-    with (tmp_path / root.name / 'a/02').open('rb') as f:
+    with (tmp_path / root.name / "a/02").open("rb") as f:
         assert f.read() == payload_02
-    with (tmp_path / root.name / 'a/b/03').open('rb') as f:
+    with (tmp_path / root.name / "a/b/03").open("rb") as f:
         assert f.read() == payload_03
 
 
 def test_dbfs_upload_download(w, random, junk, tmp_path):
-    root = pathlib.Path(f'/tmp/{random()}')
+    root = pathlib.Path(f"/tmp/{random()}")
 
     f = io.BytesIO(b"some text data")
-    w.dbfs.upload(f'{root}/01', f)
+    w.dbfs.upload(f"{root}/01", f)
 
-    with w.dbfs.download(f'{root}/01') as f:
+    with w.dbfs.download(f"{root}/01") as f:
         assert f.read() == b"some text data"
 
 
@@ -209,21 +209,23 @@ def create_schema(w, catalog, schema):
 
     @staticmethod
     def create_volume(w, catalog, schema, volume):
-        res = w.volumes.create(catalog_name=catalog,
-                               schema_name=schema,
-                               name=volume,
-                               volume_type=VolumeType.MANAGED)
+        res = w.volumes.create(
+            catalog_name=catalog,
+            schema_name=schema,
+            name=volume,
+            volume_type=VolumeType.MANAGED,
+        )
         return ResourceWithCleanup(lambda: w.volumes.delete(res.full_name))
 
 
 def test_files_api_upload_download(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
             f = io.BytesIO(b"some text data")
-            target_file = f'/Volumes/main/{schema}/{volume}/filesit-with-?-and-#-{random()}.txt'
+            target_file = f"/Volumes/main/{schema}/{volume}/filesit-with-?-and-#-{random()}.txt"
             w.files.upload(target_file, f)
             with w.files.download(target_file).contents as f:
                 assert f.read() == b"some text data"
@@ -231,12 +233,12 @@ def test_files_api_upload_download(ucws, random):
 
 def test_files_api_read_twice_from_one_download(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
             f = io.BytesIO(b"some text data")
-            target_file = f'/Volumes/main/{schema}/{volume}/filesit-{random()}.txt'
+            target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
             w.files.upload(target_file, f)
 
             res = w.files.download(target_file).contents
@@ -251,48 +253,48 @@ def test_files_api_read_twice_from_one_download(ucws, random):
 
 def test_files_api_delete_file(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
             f = io.BytesIO(b"some text data")
-            target_file = f'/Volumes/main/{schema}/{volume}/filesit-{random()}.txt'
+            target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
             w.files.upload(target_file, f)
             w.files.delete(target_file)
 
 
 def test_files_api_get_metadata(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
             f = io.BytesIO(b"some text data")
-            target_file = f'/Volumes/main/{schema}/{volume}/filesit-{random()}.txt'
+            target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
             w.files.upload(target_file, f)
             m = w.files.get_metadata(target_file)
-            assert m.content_type == 'application/octet-stream'
+            assert m.content_type == "application/octet-stream"
             assert m.content_length == 14
             assert m.last_modified is not None
 
 
 def test_files_api_create_directory(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
-            target_directory = f'/Volumes/main/{schema}/{volume}/filesit-{random()}/'
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
+            target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
             w.files.create_directory(target_directory)
 
 
 def test_files_api_list_directory_contents(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
-            target_directory = f'/Volumes/main/{schema}/{volume}/filesit-{random()}'
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
+            target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}"
             w.files.upload(target_directory + "/file1.txt", io.BytesIO(b"some text data"))
             w.files.upload(target_directory + "/file2.txt", io.BytesIO(b"some text data"))
             w.files.upload(target_directory + "/file3.txt", io.BytesIO(b"some text data"))
@@ -303,22 +305,22 @@ def test_files_api_list_directory_contents(ucws, random):
 
 def test_files_api_delete_directory(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
-            target_directory = f'/Volumes/main/{schema}/{volume}/filesit-{random()}/'
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
+            target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
             w.files.create_directory(target_directory)
             w.files.delete_directory(target_directory)
 
 
 def test_files_api_get_directory_metadata(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
-            target_directory = f'/Volumes/main/{schema}/{volume}/filesit-{random()}/'
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
+            target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
             w.files.create_directory(target_directory)
             w.files.get_directory_metadata(target_directory)
 
@@ -326,17 +328,30 @@ def test_files_api_get_directory_metadata(ucws, random):
 @pytest.mark.benchmark
 def test_files_api_download_benchmark(ucws, random):
     w = ucws
-    schema = 'filesit-' + random()
-    volume = 'filesit-' + random()
-    with ResourceWithCleanup.create_schema(w, 'main', schema):
-        with ResourceWithCleanup.create_volume(w, 'main', schema, volume):
+    schema = "filesit-" + random()
+    volume = "filesit-" + random()
+    with ResourceWithCleanup.create_schema(w, "main", schema):
+        with ResourceWithCleanup.create_volume(w, "main", schema, volume):
             # Create a 50 MB file
             f = io.BytesIO(bytes(range(256)) * 200000)
-            target_file = f'/Volumes/main/{schema}/{volume}/filesit-benchmark-{random()}.txt'
+            target_file = f"/Volumes/main/{schema}/{volume}/filesit-benchmark-{random()}.txt"
             w.files.upload(target_file, f)
 
             totals = {}
-            for chunk_size_kb in [20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, None]:
+            for chunk_size_kb in [
+                20,
+                50,
+                100,
+                200,
+                500,
+                1000,
+                2000,
+                5000,
+                10000,
+                20000,
+                50000,
+                None,
+            ]:
                 chunk_size = chunk_size_kb * 1024 if chunk_size_kb else None
                 total = 0
                 count = 10
@@ -349,15 +364,21 @@ def test_files_api_download_benchmark(ucws, random):
                     end = time.time()
                     total += end - start
                 avg_time = total / count
-                logging.info(f"[chunk_size=%s] Average time to download: %f seconds",
-                             str(chunk_size_kb) + 'kb' if chunk_size_kb else 'None', avg_time)
+                logging.info(
+                    "[chunk_size=%s] Average time to download: %f seconds",
+                    str(chunk_size_kb) + "kb" if chunk_size_kb else "None",
+                    avg_time,
+                )
                 totals[chunk_size_kb] = avg_time
             logging.info("Benchmark results:")
             best: Tuple[Union[int, None], Union[float, None]] = (None, None)
             for k, v in totals.items():
                 if best[1] is None or v < best[1]:
                     best = (k, v)
-                logging.info(f"[chunk_size=%s] Average time to download: %f seconds",
-                             str(k) + 'kb' if k else 'None', v)
+                logging.info(
+                    "[chunk_size=%s] Average time to download: %f seconds",
+                    str(k) + "kb" if k else "None",
+                    v,
+                )
             min_str = str(best[0]) + "kb" if best[0] else "None"
             logging.info("Fastest chunk size: %s in %f seconds", min_str, best[1])
diff --git a/tests/integration/test_iam.py b/tests/integration/test_iam.py
index f8d7c3b1f..cc40c039c 100644
--- a/tests/integration/test_iam.py
+++ b/tests/integration/test_iam.py
@@ -5,7 +5,7 @@
 
 
 def test_filtering_groups(w, random):
-    all = w.groups.list(filter=f'displayName eq any-{random(12)}')
+    all = w.groups.list(filter=f"displayName eq any-{random(12)}")
     found = len(list(all))
     assert found == 0
 
@@ -26,12 +26,18 @@ def test_scim_get_user_as_dict(w):
 
 @pytest.mark.parametrize(
     "path,call",
-    [("/api/2.0/preview/scim/v2/Users", lambda w: w.users.list(count=10)),
-     ("/api/2.0/preview/scim/v2/Groups", lambda w: w.groups.list(count=4)),
-     ("/api/2.0/preview/scim/v2/ServicePrincipals", lambda w: w.service_principals.list(count=1)), ])
+    [
+        ("/api/2.0/preview/scim/v2/Users", lambda w: w.users.list(count=10)),
+        ("/api/2.0/preview/scim/v2/Groups", lambda w: w.groups.list(count=4)),
+        (
+            "/api/2.0/preview/scim/v2/ServicePrincipals",
+            lambda w: w.service_principals.list(count=1),
+        ),
+    ],
+)
 def test_workspace_users_list_pagination(w, path, call):
-    raw = w.api_client.do('GET', path)
-    total = raw['totalResults']
+    raw = w.api_client.do("GET", path)
+    total = raw["totalResults"]
     all = call(w)
     found = len(list(all))
     assert found == total
@@ -39,12 +45,24 @@ def test_workspace_users_list_pagination(w, path, call):
 
 @pytest.mark.parametrize(
     "path,call",
-    [("/api/2.0/accounts/%s/scim/v2/Users", lambda a: a.users.list(count=3000)),
-     ("/api/2.0/accounts/%s/scim/v2/Groups", lambda a: a.groups.list(count=5)),
-     ("/api/2.0/accounts/%s/scim/v2/ServicePrincipals", lambda a: a.service_principals.list(count=1000)), ])
+    [
+        (
+            "/api/2.0/accounts/%s/scim/v2/Users",
+            lambda a: a.users.list(count=3000),
+        ),
+        (
+            "/api/2.0/accounts/%s/scim/v2/Groups",
+            lambda a: a.groups.list(count=5),
+        ),
+        (
+            "/api/2.0/accounts/%s/scim/v2/ServicePrincipals",
+            lambda a: a.service_principals.list(count=1000),
+        ),
+    ],
+)
 def test_account_users_list_pagination(a, path, call):
-    raw = a.api_client.do('GET', path.replace("%s", a.config.account_id))
-    total = raw['totalResults']
+    raw = a.api_client.do("GET", path.replace("%s", a.config.account_id))
+    total = raw["totalResults"]
     all = call(a)
     found = len(list(all))
     assert found == total
diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py
index 768752a75..cfc8de0b7 100644
--- a/tests/integration/test_jobs.py
+++ b/tests/integration/test_jobs.py
@@ -5,7 +5,7 @@
 def test_jobs(w):
     found = 0
     for job in w.jobs.list():
-        logging.info(f'Looking at {job.settings.name}')
+        logging.info(f"Looking at {job.settings.name}")
         found += 1
     assert found > 0
 
@@ -13,33 +13,35 @@ def test_jobs(w):
 def test_submitting_jobs(w, random, env_or_skip):
     from databricks.sdk.service import compute, jobs
 
-    py_on_dbfs = f'/home/{w.current_user.me().user_name}/sample.py'
+    py_on_dbfs = f"/home/{w.current_user.me().user_name}/sample.py"
     with w.dbfs.open(py_on_dbfs, write=True, overwrite=True) as f:
         f.write(b'import time; time.sleep(10); print("Hello, World!")')
 
     waiter = w.jobs.submit(
-        run_name=f'py-sdk-{random(8)}',
+        run_name=f"py-sdk-{random(8)}",
         tasks=[
             jobs.SubmitTask(
-                task_key='pi',
+                task_key="pi",
                 new_cluster=compute.ClusterSpec(
                     spark_version=w.clusters.select_spark_version(long_term_support=True),
                     # node_type_id=w.clusters.select_node_type(local_disk=True),
-                    instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
-                    num_workers=1),
-                spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'),
+                    instance_pool_id=env_or_skip("TEST_INSTANCE_POOL_ID"),
+                    num_workers=1,
+                ),
+                spark_python_task=jobs.SparkPythonTask(python_file=f"dbfs:{py_on_dbfs}"),
             )
-        ])
+        ],
+    )
 
-    logging.info(f'starting to poll: {waiter.run_id}')
+    logging.info(f"starting to poll: {waiter.run_id}")
 
     def print_status(run: jobs.Run):
-        statuses = [f'{t.task_key}: {t.state.life_cycle_state}' for t in run.tasks]
+        statuses = [f"{t.task_key}: {t.state.life_cycle_state}" for t in run.tasks]
         logging.info(f'workflow intermediate status: {", ".join(statuses)}')
 
     run = waiter.result(timeout=datetime.timedelta(minutes=15), callback=print_status)
 
-    logging.info(f'job finished: {run.run_page_url}')
+    logging.info(f"job finished: {run.run_page_url}")
 
 
 def test_last_job_runs(w):
@@ -63,28 +65,35 @@ def test_last_job_runs(w):
 
     summary = []
     for job_id, run in latest_state.items():
-        summary.append({
-            'job_name': all_jobs[job_id].settings.name,
-            'last_status': run.state.result_state,
-            'last_finished': datetime.fromtimestamp(run.end_time / 1000, timezone.utc),
-            'average_duration': sum(durations[job_id]) / len(durations[job_id])
-        })
+        summary.append(
+            {
+                "job_name": all_jobs[job_id].settings.name,
+                "last_status": run.state.result_state,
+                "last_finished": datetime.fromtimestamp(run.end_time / 1000, timezone.utc),
+                "average_duration": sum(durations[job_id]) / len(durations[job_id]),
+            }
+        )
 
-    for line in sorted(summary, key=lambda s: s['last_finished'], reverse=True):
-        logging.info(f'Latest: {line}')
+    for line in sorted(summary, key=lambda s: s["last_finished"], reverse=True):
+        logging.info(f"Latest: {line}")
 
 
 def test_create_job(w):
     from databricks.sdk.service import compute, jobs
 
-    cluster = jobs.JobCluster(job_cluster_key="cluster1",
-                              new_cluster=compute.ClusterSpec(
-                                  num_workers=2,
-                                  spark_version=w.clusters.select_spark_version(),
-                                  node_type_id=w.clusters.select_node_type(local_disk=True)))
-
-    task1 = jobs.Task(task_key="task1",
-                      job_cluster_key="cluster1",
-                      python_wheel_task=jobs.PythonWheelTask(entry_point="test", package_name="deepspeed"))
+    cluster = jobs.JobCluster(
+        job_cluster_key="cluster1",
+        new_cluster=compute.ClusterSpec(
+            num_workers=2,
+            spark_version=w.clusters.select_spark_version(),
+            node_type_id=w.clusters.select_node_type(local_disk=True),
+        ),
+    )
+
+    task1 = jobs.Task(
+        task_key="task1",
+        job_cluster_key="cluster1",
+        python_wheel_task=jobs.PythonWheelTask(entry_point="test", package_name="deepspeed"),
+    )
 
     w.jobs.create(job_clusters=[cluster], tasks=[task1])
diff --git a/tests/integration/test_repos.py b/tests/integration/test_repos.py
index ebba9e155..1682d9b87 100644
--- a/tests/integration/test_repos.py
+++ b/tests/integration/test_repos.py
@@ -3,4 +3,4 @@
 
 def test_repos_list(w):
     for repo in w.repos.list():
-        logging.info(f'Found repo: {repo}')
\ No newline at end of file
+        logging.info(f"Found repo: {repo}")
diff --git a/tests/integration/test_sql.py b/tests/integration/test_sql.py
index af368609b..4ab775a0a 100644
--- a/tests/integration/test_sql.py
+++ b/tests/integration/test_sql.py
@@ -6,10 +6,14 @@
 def test_query_history_list_with_filter(w):
 
     def date_to_ms(date):
-        return int(datetime.strptime(date, '%Y-%m-%d').timestamp() * 1000)
+        return int(datetime.strptime(date, "%Y-%m-%d").timestamp() * 1000)
 
-    filter = QueryFilter(query_start_time_range=TimeRange(start_time_ms=date_to_ms('2023-01-01'),
-                                                          end_time_ms=date_to_ms('2023-01-02')))
+    filter = QueryFilter(
+        query_start_time_range=TimeRange(
+            start_time_ms=date_to_ms("2023-01-01"),
+            end_time_ms=date_to_ms("2023-01-02"),
+        )
+    )
     queries = w.query_history.list(filter_by=filter)
     for q in queries.res:
         print(q)
diff --git a/tests/integration/test_workspace.py b/tests/integration/test_workspace.py
index 4adbee773..a3b4fd9c5 100644
--- a/tests/integration/test_workspace.py
+++ b/tests/integration/test_workspace.py
@@ -5,14 +5,14 @@
 
 def test_workspace_recursive_list(w, workspace_dir, random):
     # create a file in the directory
-    file = f'{workspace_dir}/file-{random(12)}.py'
-    w.workspace.upload(file, io.BytesIO(b'print(1)'))
+    file = f"{workspace_dir}/file-{random(12)}.py"
+    w.workspace.upload(file, io.BytesIO(b"print(1)"))
     # create a subdirectory
-    subdirectory = f'{workspace_dir}/subdir-{random(12)}'
+    subdirectory = f"{workspace_dir}/subdir-{random(12)}"
     w.workspace.mkdirs(subdirectory)
     # create a file in the subdirectory
-    subfile = f'{subdirectory}/subfile-{random(12)}.py'
-    w.workspace.upload(subfile, io.BytesIO(b'print(2)'))
+    subfile = f"{subdirectory}/subfile-{random(12)}.py"
+    w.workspace.upload(subfile, io.BytesIO(b"print(2)"))
     # list the directory recursively
     names = []
     for i in w.workspace.list(workspace_dir, recursive=True):
@@ -21,73 +21,78 @@ def test_workspace_recursive_list(w, workspace_dir, random):
 
 
 def test_workspace_upload_download_notebooks(w, random):
-    notebook = f'/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py'
+    notebook = f"/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py"
 
-    w.workspace.upload(notebook, io.BytesIO(b'print(1)'))
+    w.workspace.upload(notebook, io.BytesIO(b"print(1)"))
     with w.workspace.download(notebook) as f:
         content = f.read()
-        assert content == b'# Databricks notebook source\nprint(1)'
+        assert content == b"# Databricks notebook source\nprint(1)"
 
     w.workspace.delete(notebook)
 
 
 def test_workspace_unzip_notebooks(w, random):
-    notebook = f'/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py'
+    notebook = f"/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py"
 
     # Big notebooks can be gzipped during transfer by the API (out of our control)
     # Creating some large content to trigger this behaviour
-    notebook_content = ('print(1)\n' * 1000).strip('\n')
+    notebook_content = ("print(1)\n" * 1000).strip("\n")
 
-    w.workspace.upload(notebook, io.BytesIO(bytes(notebook_content, 'utf-8')))
+    w.workspace.upload(notebook, io.BytesIO(bytes(notebook_content, "utf-8")))
     with w.workspace.download(notebook) as f:
         content = f.read()
-        expected_content = bytes(f'# Databricks notebook source\n{notebook_content}', 'utf-8')
+        expected_content = bytes(f"# Databricks notebook source\n{notebook_content}", "utf-8")
         assert content == expected_content
 
     w.workspace.delete(notebook)
 
 
 def test_workspace_download_connection_closed(w, random):
-    notebook = f'/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py'
+    notebook = f"/Users/{w.current_user.me().user_name}/notebook-{random(12)}.py"
 
-    w.workspace.upload(notebook, io.BytesIO(b'print(1)'))
+    w.workspace.upload(notebook, io.BytesIO(b"print(1)"))
 
     for n in range(30):
         with w.workspace.download(notebook) as f:
             content = f.read()
-            assert content == b'# Databricks notebook source\nprint(1)'
+            assert content == b"# Databricks notebook source\nprint(1)"
 
     w.workspace.delete(notebook)
 
 
 def test_workspace_upload_download_files(w, random):
-    py_file = f'/Users/{w.current_user.me().user_name}/file-{random(12)}.py'
+    py_file = f"/Users/{w.current_user.me().user_name}/file-{random(12)}.py"
 
-    w.workspace.upload(py_file, io.BytesIO(b'print(1)'), format=ImportFormat.AUTO)
+    w.workspace.upload(py_file, io.BytesIO(b"print(1)"), format=ImportFormat.AUTO)
     with w.workspace.download(py_file) as f:
         content = f.read()
-        assert content == b'print(1)'
+        assert content == b"print(1)"
 
     w.workspace.delete(py_file)
 
 
 def test_workspace_upload_download_txt_files(w, random):
-    txt_file = f'/Users/{w.current_user.me().user_name}/txt-{random(12)}.txt'
+    txt_file = f"/Users/{w.current_user.me().user_name}/txt-{random(12)}.txt"
 
-    w.workspace.upload(txt_file, io.BytesIO(b'print(1)'), format=ImportFormat.AUTO)
+    w.workspace.upload(txt_file, io.BytesIO(b"print(1)"), format=ImportFormat.AUTO)
     with w.workspace.download(txt_file) as f:
         content = f.read()
-        assert content == b'print(1)'
+        assert content == b"print(1)"
 
     w.workspace.delete(txt_file)
 
 
 def test_workspace_upload_download_notebooks_no_extension(w, random):
-    nb = f'/Users/{w.current_user.me().user_name}/notebook-{random(12)}'
-
-    w.workspace.upload(nb, io.BytesIO(b'print(1)'), format=ImportFormat.SOURCE, language=Language.PYTHON)
+    nb = f"/Users/{w.current_user.me().user_name}/notebook-{random(12)}"
+
+    w.workspace.upload(
+        nb,
+        io.BytesIO(b"print(1)"),
+        format=ImportFormat.SOURCE,
+        language=Language.PYTHON,
+    )
     with w.workspace.download(nb) as f:
         content = f.read()
-        assert content == b'# Databricks notebook source\nprint(1)'
+        assert content == b"# Databricks notebook source\nprint(1)"
 
     w.workspace.delete(nb)
diff --git a/tests/test_auth.py b/tests/test_auth.py
index cd8f3cfc1..009300faa 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -4,10 +4,11 @@
 
 from .conftest import __tests__, raises, set_az_path, set_home
 
-default_auth_base_error_message = \
-    "default auth: cannot configure default credentials, " \
-    "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication " \
+default_auth_base_error_message = (
+    "default auth: cannot configure default credentials, "
+    "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication "
     "to configure credentials for your preferred authentication method"
+)
 
 
 # This test uses the fake file system to avoid interference from local default profile.
@@ -18,150 +19,153 @@ def test_config_no_params(fake_fs):
 
 @raises(f"{default_auth_base_error_message}. Config: host=https://x. Env: DATABRICKS_HOST")
 def test_config_host_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
     Config()
 
 
 @raises(f"{default_auth_base_error_message}. Config: token=***. Env: DATABRICKS_TOKEN")
 def test_config_token_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
     Config()
 
 
 def test_config_host_token_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
     cfg = Config()
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://x'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://x"
 
 
 def test_config_host_param_token_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
-    cfg = Config(host='https://x')
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
+    cfg = Config(host="https://x")
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://x'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://x"
 
 
 @raises(
     f"{default_auth_base_error_message}. Config: username=x, password=***. Env: DATABRICKS_USERNAME, DATABRICKS_PASSWORD"
 )
 def test_config_user_password_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
     cfg = Config()
 
-    assert cfg.host == 'https://x'
+    assert cfg.host == "https://x"
 
 
 def test_config_basic_auth(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
     cfg = Config()
 
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://x'
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://x"
 
 
 def test_config_attribute_precedence(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    cfg = Config(host='y')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
+    cfg = Config(host="y")
 
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://y'
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://y"
 
 
 def test_config_basic_auth_mix(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    cfg = Config(host='y', username='x')
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    cfg = Config(host="y", username="x")
 
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://y'
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://y"
 
 
 def test_config_basic_auth_attrs():
-    cfg = Config(host='y', username='x', password='x')
+    cfg = Config(host="y", username="x", password="x")
 
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://y'
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://y"
 
 
 @raises(
     "validate: more than one authorization method configured: basic and pat. Config: host=https://x, token=***, username=x, password=***. Env: DATABRICKS_HOST, DATABRICKS_TOKEN, DATABRICKS_USERNAME, DATABRICKS_PASSWORD"
 )
 def test_config_conflicting_envs(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
     Config()
 
 
 def test_config_conflicting_envs_auth_type(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'x')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    cfg = Config(auth_type='basic')
+    monkeypatch.setenv("DATABRICKS_HOST", "x")
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "x")
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
+    cfg = Config(auth_type="basic")
 
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://x'
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://x"
 
 
 @raises(f"{default_auth_base_error_message}. Config: config_file=x. Env: DATABRICKS_CONFIG_FILE")
 def test_config_config_file(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x')
+    monkeypatch.setenv("DATABRICKS_CONFIG_FILE", "x")
     Config()
 
 
 @raises(f"{default_auth_base_error_message}. Config: host=https://x")
-def test_config_config_file_skip_default_profile_if_host_specified(monkeypatch):
-    set_home(monkeypatch, '/testdata')
-    cfg = Config(host='x')
+def test_config_config_file_skip_default_profile_if_host_specified(
+    monkeypatch,
+):
+    set_home(monkeypatch, "/testdata")
+    cfg = Config(host="x")
 
 
 @raises(default_auth_base_error_message)
-def test_config_config_file_with_empty_default_profile_select_default(monkeypatch):
-    set_home(monkeypatch, '/testdata/empty_default')
+def test_config_config_file_with_empty_default_profile_select_default(
+    monkeypatch,
+):
+    set_home(monkeypatch, "/testdata/empty_default")
     Config()
 
 
 def test_config_config_file_with_empty_default_profile_select_abc(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'abc')
-    set_home(monkeypatch, '/testdata/empty_default')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "abc")
+    set_home(monkeypatch, "/testdata/empty_default")
     cfg = Config()
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://foo'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://foo"
 
 
 def test_config_pat_from_databricks_cfg(monkeypatch):
-    set_home(monkeypatch, '/testdata')
+    set_home(monkeypatch, "/testdata")
     cfg = Config()
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://dbc-XXXXXXXX-YYYY.cloud.databricks.com'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://dbc-XXXXXXXX-YYYY.cloud.databricks.com"
 
 
 def test_config_pat_from_databricks_cfg_dot_profile(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'pat.with.dot')
-    set_home(monkeypatch, '/testdata')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "pat.with.dot")
+    set_home(monkeypatch, "/testdata")
     cfg = Config()
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://dbc-XXXXXXXX-YYYY.cloud.databricks.com'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://dbc-XXXXXXXX-YYYY.cloud.databricks.com"
 
 
-@raises(
-    f"{default_auth_base_error_message}. Config: token=***, profile=nohost. Env: DATABRICKS_CONFIG_PROFILE")
+@raises(f"{default_auth_base_error_message}. Config: token=***, profile=nohost. Env: DATABRICKS_CONFIG_PROFILE")
 def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
-    set_home(monkeypatch, '/testdata')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "nohost")
+    set_home(monkeypatch, "/testdata")
     Config()
 
 
@@ -169,9 +173,9 @@ def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch):
     f"{default_auth_base_error_message}. Config: token=***, profile=nohost. Env: DATABRICKS_TOKEN, DATABRICKS_CONFIG_PROFILE"
 )
 def test_config_config_profile_and_token(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'x')
-    set_home(monkeypatch, '/testdata')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "nohost")
+    monkeypatch.setenv("DATABRICKS_TOKEN", "x")
+    set_home(monkeypatch, "/testdata")
     Config()
 
 
@@ -179,28 +183,31 @@ def test_config_config_profile_and_token(monkeypatch):
     "validate: more than one authorization method configured: basic and pat. Config: token=***, username=x, profile=nohost. Env: DATABRICKS_USERNAME, DATABRICKS_CONFIG_PROFILE"
 )
 def test_config_config_profile_and_password(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    set_home(monkeypatch, '/testdata')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "nohost")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
+    set_home(monkeypatch, "/testdata")
     Config()
 
 
 def test_config_azure_pat():
-    cfg = Config(host='https://adb-xxx.y.azuredatabricks.net/', token='y')
+    cfg = Config(host="https://adb-xxx.y.azuredatabricks.net/", token="y")
 
-    assert cfg.auth_type == 'pat'
-    assert cfg.host == 'https://adb-xxx.y.azuredatabricks.net'
+    assert cfg.auth_type == "pat"
+    assert cfg.host == "https://adb-xxx.y.azuredatabricks.net"
     assert cfg.is_azure
 
 
 def test_config_azure_cli_host(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
-
-    assert cfg.auth_type == 'azure-cli'
-    assert cfg.host == 'https://adb-123.4.azuredatabricks.net'
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    cfg = Config(
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id="/sub/rg/ws",
+    )
+
+    assert cfg.auth_type == "azure-cli"
+    assert cfg.host == "https://adb-123.4.azuredatabricks.net"
     assert cfg.is_azure
 
 
@@ -208,48 +215,56 @@ def test_config_azure_cli_host(monkeypatch, mock_tenant):
     "default auth: cannot configure default credentials, please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication to configure credentials for your preferred authentication method. Config: azure_workspace_resource_id=/sub/rg/ws"
 )
 def test_config_azure_cli_host_fail(monkeypatch):
-    monkeypatch.setenv('FAIL', 'yes')
-    set_home(monkeypatch, '/testdata/azure')
+    monkeypatch.setenv("FAIL", "yes")
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    cfg = Config(azure_workspace_resource_id='/sub/rg/ws')
+    cfg = Config(azure_workspace_resource_id="/sub/rg/ws")
 
 
 @raises(f"{default_auth_base_error_message}. Config: azure_workspace_resource_id=/sub/rg/ws")
 def test_config_azure_cli_host_az_not_installed(monkeypatch):
-    set_home(monkeypatch, '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/whatever')
-    cfg = Config(azure_workspace_resource_id='/sub/rg/ws')
+    set_home(monkeypatch, "/testdata/azure")
+    monkeypatch.setenv("PATH", __tests__ + "/whatever")
+    cfg = Config(azure_workspace_resource_id="/sub/rg/ws")
 
 
 @raises(
     "validate: more than one authorization method configured: azure and pat. Config: token=***, azure_workspace_resource_id=/sub/rg/ws"
 )
-def test_config_azure_cli_host_pat_conflict_with_config_file_present_without_default_profile(monkeypatch):
-    set_home(monkeypatch, '/testdata/azure')
+def test_config_azure_cli_host_pat_conflict_with_config_file_present_without_default_profile(
+    monkeypatch,
+):
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    cfg = Config(token='x', azure_workspace_resource_id='/sub/rg/ws')
+    cfg = Config(token="x", azure_workspace_resource_id="/sub/rg/ws")
 
 
 def test_config_azure_cli_host_and_resource_id(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata')
+    set_home(monkeypatch, "/testdata")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
-
-    assert cfg.auth_type == 'azure-cli'
-    assert cfg.host == 'https://adb-123.4.azuredatabricks.net'
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    cfg = Config(
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id="/sub/rg/ws",
+    )
+
+    assert cfg.auth_type == "azure-cli"
+    assert cfg.host == "https://adb-123.4.azuredatabricks.net"
     assert cfg.is_azure
 
 
 def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch, mock_tenant):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'justhost')
-    set_home(monkeypatch, '/testdata/azure')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "justhost")
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
-
-    assert cfg.auth_type == 'azure-cli'
-    assert cfg.host == 'https://adb-123.4.azuredatabricks.net'
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    cfg = Config(
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id="/sub/rg/ws",
+    )
+
+    assert cfg.auth_type == "azure-cli"
+    assert cfg.host == "https://adb-123.4.azuredatabricks.net"
     assert cfg.is_azure
 
 
@@ -257,27 +272,30 @@ def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeyp
     "validate: more than one authorization method configured: azure and basic. Config: host=https://adb-123.4.azuredatabricks.net, username=x, azure_workspace_resource_id=/sub/rg/ws. Env: DATABRICKS_USERNAME"
 )
 def test_config_azure_and_password_conflict(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'x')
-    set_home(monkeypatch, '/testdata/azure')
+    monkeypatch.setenv("DATABRICKS_USERNAME", "x")
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws')
+    cfg = Config(
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id="/sub/rg/ws",
+    )
 
 
 @raises(
     "resolve: testdata/corrupt/.databrickscfg has no DEFAULT profile configured. Config: profile=DEFAULT. Env: DATABRICKS_CONFIG_PROFILE"
 )
 def test_config_corrupt_config(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'DEFAULT')
-    set_home(monkeypatch, '/testdata/corrupt')
+    monkeypatch.setenv("DATABRICKS_CONFIG_PROFILE", "DEFAULT")
+    set_home(monkeypatch, "/testdata/corrupt")
     Config()
 
 
 def test_config_auth_type_from_env(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_AUTH_TYPE', 'basic')
-    monkeypatch.setenv('DATABRICKS_PASSWORD', 'password')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'token')
-    monkeypatch.setenv('DATABRICKS_USERNAME', 'user')
-    cfg = Config(host='x')
-
-    assert cfg.auth_type == 'basic'
-    assert cfg.host == 'https://x'
+    monkeypatch.setenv("DATABRICKS_AUTH_TYPE", "basic")
+    monkeypatch.setenv("DATABRICKS_PASSWORD", "password")
+    monkeypatch.setenv("DATABRICKS_TOKEN", "token")
+    monkeypatch.setenv("DATABRICKS_USERNAME", "user")
+    cfg = Config(host="x")
+
+    assert cfg.auth_type == "basic"
+    assert cfg.host == "https://x"
diff --git a/tests/test_auth_manual_tests.py b/tests/test_auth_manual_tests.py
index 8c58dd6bf..f66e92ea8 100644
--- a/tests/test_auth_manual_tests.py
+++ b/tests/test_auth_manual_tests.py
@@ -6,69 +6,83 @@
 
 
 def test_azure_cli_workspace_header_present(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
-    cfg = Config(auth_type='azure-cli',
-                 host='https://adb-123.4.azuredatabricks.net',
-                 azure_workspace_resource_id=resource_id)
-    assert 'X-Databricks-Azure-Workspace-Resource-Id' in cfg.authenticate()
-    assert cfg.authenticate()['X-Databricks-Azure-Workspace-Resource-Id'] == resource_id
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    resource_id = "/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123"
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id=resource_id,
+    )
+    assert "X-Databricks-Azure-Workspace-Resource-Id" in cfg.authenticate()
+    assert cfg.authenticate()["X-Databricks-Azure-Workspace-Resource-Id"] == resource_id
 
 
 def test_azure_cli_user_with_management_access(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
-    cfg = Config(auth_type='azure-cli',
-                 host='https://adb-123.4.azuredatabricks.net',
-                 azure_workspace_resource_id=resource_id)
-    assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    resource_id = "/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123"
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id=resource_id,
+    )
+    assert "X-Databricks-Azure-SP-Management-Token" in cfg.authenticate()
 
 
 def test_azure_cli_user_no_management_access(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    monkeypatch.setenv('FAIL_IF', 'https://management.core.windows.net/')
-    resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
-    cfg = Config(auth_type='azure-cli',
-                 host='https://adb-123.4.azuredatabricks.net',
-                 azure_workspace_resource_id=resource_id)
-    assert 'X-Databricks-Azure-SP-Management-Token' not in cfg.authenticate()
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    monkeypatch.setenv("FAIL_IF", "https://management.core.windows.net/")
+    resource_id = "/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123"
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id=resource_id,
+    )
+    assert "X-Databricks-Azure-SP-Management-Token" not in cfg.authenticate()
 
 
 def test_azure_cli_fallback(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    monkeypatch.setenv('FAIL_IF', 'subscription')
-    resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
-    cfg = Config(auth_type='azure-cli',
-                 host='https://adb-123.4.azuredatabricks.net',
-                 azure_workspace_resource_id=resource_id)
-    assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    monkeypatch.setenv("FAIL_IF", "subscription")
+    resource_id = "/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123"
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id=resource_id,
+    )
+    assert "X-Databricks-Azure-SP-Management-Token" in cfg.authenticate()
 
 
 def test_azure_cli_with_warning_on_stderr(monkeypatch, mock_tenant):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    mock_tenant('adb-123.4.azuredatabricks.net')
-    monkeypatch.setenv('WARN', 'this is a warning')
-    resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
-    cfg = Config(auth_type='azure-cli',
-                 host='https://adb-123.4.azuredatabricks.net',
-                 azure_workspace_resource_id=resource_id)
-    assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
+    mock_tenant("adb-123.4.azuredatabricks.net")
+    monkeypatch.setenv("WARN", "this is a warning")
+    resource_id = "/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123"
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_workspace_resource_id=resource_id,
+    )
+    assert "X-Databricks-Azure-SP-Management-Token" in cfg.authenticate()
 
 
-@pytest.mark.parametrize('username', ['systemAssignedIdentity', 'userAssignedIdentity'])
+@pytest.mark.parametrize("username", ["systemAssignedIdentity", "userAssignedIdentity"])
 def test_azure_cli_does_not_specify_tenant_id_with_msi(monkeypatch, username):
-    set_home(monkeypatch, '/testdata/azure')
+    set_home(monkeypatch, "/testdata/azure")
     set_az_path(monkeypatch)
-    monkeypatch.setenv('FAIL_IF_TENANT_ID_SET', 'true')
-    monkeypatch.setenv('AZ_USER_NAME', username)
-    monkeypatch.setenv('AZ_USER_TYPE', 'servicePrincipal')
-    cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', azure_tenant_id='abc')
+    monkeypatch.setenv("FAIL_IF_TENANT_ID_SET", "true")
+    monkeypatch.setenv("AZ_USER_NAME", username)
+    monkeypatch.setenv("AZ_USER_TYPE", "servicePrincipal")
+    cfg = Config(
+        auth_type="azure-cli",
+        host="https://adb-123.4.azuredatabricks.net",
+        azure_tenant_id="abc",
+    )
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index 16a8ecfc4..820c52c11 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -60,72 +60,123 @@ def test_streaming_response_read_closes(config):
     assert dummy_response.isClosed()
 
 
-@pytest.mark.parametrize('status_code,headers,body,expected_error', [
-    (400, {}, {
-        "message":
-        "errorMessage",
-        "details": [{
-            "type": DatabricksError._error_info_type,
-            "reason": "error reason",
-            "domain": "error domain",
-            "metadata": {
-                "etag": "error etag"
+@pytest.mark.parametrize(
+    "status_code,headers,body,expected_error",
+    [
+        (
+            400,
+            {},
+            {
+                "message": "errorMessage",
+                "details": [
+                    {
+                        "type": DatabricksError._error_info_type,
+                        "reason": "error reason",
+                        "domain": "error domain",
+                        "metadata": {"etag": "error etag"},
+                    },
+                    {
+                        "type": "wrong type",
+                        "reason": "wrong reason",
+                        "domain": "wrong domain",
+                        "metadata": {"etag": "wrong etag"},
+                    },
+                ],
+            },
+            errors.BadRequest(
+                "errorMessage",
+                details=[
+                    {
+                        "type": DatabricksError._error_info_type,
+                        "reason": "error reason",
+                        "domain": "error domain",
+                        "metadata": {"etag": "error etag"},
+                    }
+                ],
+            ),
+        ),
+        (
+            401,
+            {},
+            {
+                "error_code": "UNAUTHORIZED",
+                "message": "errorMessage",
+            },
+            errors.Unauthenticated("errorMessage", error_code="UNAUTHORIZED"),
+        ),
+        (
+            403,
+            {},
+            {
+                "error_code": "FORBIDDEN",
+                "message": "errorMessage",
+            },
+            errors.PermissionDenied("errorMessage", error_code="FORBIDDEN"),
+        ),
+        (
+            429,
+            {},
+            {
+                "error_code": "TOO_MANY_REQUESTS",
+                "message": "errorMessage",
+            },
+            errors.TooManyRequests(
+                "errorMessage",
+                error_code="TOO_MANY_REQUESTS",
+                retry_after_secs=1,
+            ),
+        ),
+        (
+            429,
+            {"Retry-After": "100"},
+            {
+                "error_code": "TOO_MANY_REQUESTS",
+                "message": "errorMessage",
             },
-        }, {
-            "type": "wrong type",
-            "reason": "wrong reason",
-            "domain": "wrong domain",
-            "metadata": {
-                "etag": "wrong etag"
-            }
-        }],
-    },
-     errors.BadRequest('errorMessage',
-                       details=[{
-                           'type': DatabricksError._error_info_type,
-                           'reason': 'error reason',
-                           'domain': 'error domain',
-                           'metadata': {
-                               'etag': 'error etag'
-                           },
-                       }])),
-    (401, {}, {
-        'error_code': 'UNAUTHORIZED',
-        'message': 'errorMessage',
-    }, errors.Unauthenticated('errorMessage', error_code='UNAUTHORIZED')),
-    (403, {}, {
-        'error_code': 'FORBIDDEN',
-        'message': 'errorMessage',
-    }, errors.PermissionDenied('errorMessage', error_code='FORBIDDEN')),
-    (429, {}, {
-        'error_code': 'TOO_MANY_REQUESTS',
-        'message': 'errorMessage',
-    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=1)),
-    (429, {
-        'Retry-After': '100'
-    }, {
-        'error_code': 'TOO_MANY_REQUESTS',
-        'message': 'errorMessage',
-    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=100)),
-    (503, {}, {
-        'error_code': 'TEMPORARILY_UNAVAILABLE',
-        'message': 'errorMessage',
-    }, errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
-                                     retry_after_secs=1)),
-    (503, {
-        'Retry-After': '100'
-    }, {
-        'error_code': 'TEMPORARILY_UNAVAILABLE',
-        'message': 'errorMessage',
-    },
-     errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
-                                   retry_after_secs=100)),
-    (404, {}, {
-        'scimType': 'scim type',
-        'detail': 'detail',
-        'status': 'status',
-    }, errors.NotFound('scim type detail', error_code='SCIM_status')),
-])
+            errors.TooManyRequests(
+                "errorMessage",
+                error_code="TOO_MANY_REQUESTS",
+                retry_after_secs=100,
+            ),
+        ),
+        (
+            503,
+            {},
+            {
+                "error_code": "TEMPORARILY_UNAVAILABLE",
+                "message": "errorMessage",
+            },
+            errors.TemporarilyUnavailable(
+                "errorMessage",
+                error_code="TEMPORARILY_UNAVAILABLE",
+                retry_after_secs=1,
+            ),
+        ),
+        (
+            503,
+            {"Retry-After": "100"},
+            {
+                "error_code": "TEMPORARILY_UNAVAILABLE",
+                "message": "errorMessage",
+            },
+            errors.TemporarilyUnavailable(
+                "errorMessage",
+                error_code="TEMPORARILY_UNAVAILABLE",
+                retry_after_secs=100,
+            ),
+        ),
+        (
+            404,
+            {},
+            {
+                "scimType": "scim type",
+                "detail": "detail",
+                "status": "status",
+            },
+            errors.NotFound("scim type detail", error_code="SCIM_status"),
+        ),
+    ],
+)
 def test_error(requests_mock, status_code, headers, body, expected_error):
     client = _BaseClient(clock=FakeClock())
     requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
@@ -136,7 +187,10 @@ def test_error(requests_mock, status_code, headers, body, expected_error):
     assert str(actual) == str(expected_error)
     assert actual.error_code == expected_error.error_code
     assert actual.retry_after_secs == expected_error.retry_after_secs
-    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    expected_error_infos, actual_error_infos = (
+        expected_error.get_error_info(),
+        actual.get_error_info(),
+    )
     assert len(expected_error_infos) == len(actual_error_infos)
     for expected, actual in zip(expected_error_infos, actual_error_infos):
         assert expected.type == actual.type
@@ -147,18 +201,19 @@ def test_error(requests_mock, status_code, headers, body, expected_error):
 
 def test_api_client_do_custom_headers(requests_mock):
     client = _BaseClient()
-    requests_mock.get("/test",
-                      json={"well": "done"},
-                      request_headers={
-                          "test": "test",
-                          "User-Agent": useragent.to_string()
-                      })
+    requests_mock.get(
+        "/test",
+        json={"well": "done"},
+        request_headers={"test": "test", "User-Agent": useragent.to_string()},
+    )
     res = client.do("GET", "https://localhost/test", headers={"test": "test"})
     assert res == {"well": "done"}
 
 
-@pytest.mark.parametrize('status_code,include_retry_after',
-                         ((429, False), (429, True), (503, False), (503, True)))
+@pytest.mark.parametrize(
+    "status_code,include_retry_after",
+    ((429, False), (429, True), (503, False), (503, True)),
+)
 def test_http_retry_after(status_code, include_retry_after):
     requests = []
 
@@ -166,20 +221,20 @@ def inner(h: BaseHTTPRequestHandler):
         if len(requests) == 0:
             h.send_response(status_code)
             if include_retry_after:
-                h.send_header('Retry-After', '1')
-            h.send_header('Content-Type', 'application/json')
+                h.send_header("Retry-After", "1")
+            h.send_header("Content-Type", "application/json")
             h.end_headers()
         else:
             h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
+            h.send_header("Content-Type", "application/json")
             h.end_headers()
             h.wfile.write(b'{"foo": 1}')
         requests.append(h.requestline)
 
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(clock=FakeClock())
-        res = api_client.do('GET', f'{host}/foo')
-        assert 'foo' in res
+        res = api_client.do("GET", f"{host}/foo")
+        assert "foo" in res
 
     assert len(requests) == 2
 
@@ -190,19 +245,19 @@ def test_http_retry_after_wrong_format():
     def inner(h: BaseHTTPRequestHandler):
         if len(requests) == 0:
             h.send_response(429)
-            h.send_header('Retry-After', '1.58')
+            h.send_header("Retry-After", "1.58")
             h.end_headers()
         else:
             h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
+            h.send_header("Content-Type", "application/json")
             h.end_headers()
             h.wfile.write(b'{"foo": 1}')
         requests.append(h.requestline)
 
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(clock=FakeClock())
-        res = api_client.do('GET', f'{host}/foo')
-        assert 'foo' in res
+        res = api_client.do("GET", f"{host}/foo")
+        assert "foo" in res
 
     assert len(requests) == 2
 
@@ -212,14 +267,14 @@ def test_http_retried_exceed_limit():
 
     def inner(h: BaseHTTPRequestHandler):
         h.send_response(429)
-        h.send_header('Retry-After', '1')
+        h.send_header("Retry-After", "1")
         h.end_headers()
         requests.append(h.requestline)
 
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(retry_timeout_seconds=1, clock=FakeClock())
         with pytest.raises(TimeoutError):
-            api_client.do('GET', f'{host}/foo')
+            api_client.do("GET", f"{host}/foo")
 
     assert len(requests) == 1
 
@@ -240,8 +295,8 @@ def inner(h: BaseHTTPRequestHandler):
 
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(clock=FakeClock())
-        res = api_client.do('GET', f'{host}/foo')
-        assert 'foo' in res
+        res = api_client.do("GET", f"{host}/foo")
+        assert "foo" in res
 
     assert len(requests) == 2
 
@@ -259,7 +314,7 @@ def inner(h: BaseHTTPRequestHandler):
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(clock=FakeClock())
         with pytest.raises(DatabricksError):
-            api_client.do('GET', f'{host}/foo')
+            api_client.do("GET", f"{host}/foo")
 
     assert len(requests) == 1
 
@@ -276,19 +331,20 @@ def inner(h: BaseHTTPRequestHandler):
 
     with http_fixture_server(inner) as host:
         api_client = _BaseClient(clock=FakeClock())
-        res = api_client.do('GET', f'{host}/foo')
-        assert 'foo' in res
+        res = api_client.do("GET", f"{host}/foo")
+        assert "foo" in res
 
     assert len(requests) == 2
 
 
 @pytest.mark.parametrize(
-    'chunk_size,expected_chunks,data_size',
+    "chunk_size,expected_chunks,data_size",
     [
-        (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
-        (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
-        (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
-    ])
+        (5, 20, 100),  # 100 / 5 bytes per chunk = 20 chunks
+        (10, 10, 100),  # 100 / 10 bytes per chunk = 10 chunks
+        (200, 1, 100),  # 100 / 200 bytes per chunk = 1 chunk
+    ],
+)
 def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
     rng = random.Random(42)
     test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
@@ -299,8 +355,8 @@ def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
     def mock_iter_content(chunk_size: int, decode_unicode: bool):
         # Simulate how requests would chunk the data.
         for i in range(0, len(test_data), chunk_size):
-            chunk = test_data[i:i + chunk_size]
-            content_chunks.append(chunk) # track chunks for verification
+            chunk = test_data[i : i + chunk_size]
+            content_chunks.append(chunk)  # track chunks for verification
             yield chunk
 
     mock_response.iter_content = mock_iter_content
@@ -315,20 +371,20 @@ def mock_iter_content(chunk_size: int, decode_unicode: bool):
             break
         received_data += chunk
 
-    assert received_data == test_data # all data was received correctly
-    assert len(content_chunks) == expected_chunks # correct number of chunks
-    assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
+    assert received_data == test_data  # all data was received correctly
+    assert len(content_chunks) == expected_chunks  # correct number of chunks
+    assert all(len(c) <= chunk_size for c in content_chunks)  # chunks don't exceed size
 
 
 def test_is_seekable_stream():
     client = _BaseClient()
 
     # Test various input types that are not streams.
-    assert not client._is_seekable_stream(None) # None
-    assert not client._is_seekable_stream("string data") # str
-    assert not client._is_seekable_stream(b"binary data") # bytes
-    assert not client._is_seekable_stream(["list", "data"]) # list
-    assert not client._is_seekable_stream(42) # int
+    assert not client._is_seekable_stream(None)  # None
+    assert not client._is_seekable_stream("string data")  # str
+    assert not client._is_seekable_stream(b"binary data")  # bytes
+    assert not client._is_seekable_stream(["list", "data"])  # list
+    assert not client._is_seekable_stream(42)  # int
 
     # Test non-seekable stream.
     non_seekable = io.BytesIO(b"test data")
@@ -336,12 +392,12 @@ def test_is_seekable_stream():
     assert not client._is_seekable_stream(non_seekable)
 
     # Test seekable streams.
-    assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO
-    assert client._is_seekable_stream(io.StringIO("test data")) # StringIO
+    assert client._is_seekable_stream(io.BytesIO(b"test data"))  # BytesIO
+    assert client._is_seekable_stream(io.StringIO("test data"))  # StringIO
 
     # Test file objects.
-    with open(__file__, 'rb') as f:
-        assert client._is_seekable_stream(f) # File object
+    with open(__file__, "rb") as f:
+        assert client._is_seekable_stream(f)  # File object
 
     # Test custom seekable stream.
     class CustomSeekableStream(io.IOBase):
@@ -360,8 +416,13 @@ def tell(self):
 
 class RetryTestCase:
 
-    def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool,
-                 expected_result: bytes):
+    def __init__(
+        self,
+        data_provider: Callable,
+        offset: Optional[int],
+        expected_failure: bool,
+        expected_result: bytes,
+    ):
         self._data_provider = data_provider
         self._offset = offset
         self._expected_result = expected_result
@@ -376,7 +437,7 @@ def get_data(self):
     @classmethod
     def create_non_seekable_stream(cls, data: bytes):
         result = io.BytesIO(data)
-        result.seekable = lambda: False # makes the stream appear non-seekable
+        result.seekable = lambda: False  # makes the stream appear non-seekable
         return result
 
 
@@ -395,40 +456,42 @@ def raise_timeout_exception(cls):
     def return_retryable_response(cls):
         # fill response fields so that logging does not fail
         response = Response()
-        response._content = b''
+        response._content = b""
         response.status_code = 429
-        response.headers = {'Retry-After': '1'}
-        response.url = 'http://test.com/'
+        response.headers = {"Retry-After": "1"}
+        response.url = "http://test.com/"
 
         response.request = PreparedRequest()
         response.request.url = response.url
-        response.request.method = 'POST'
+        response.request.method = "POST"
         response.request.headers = None
-        response.request.body = b''
+        response.request.body = b""
         return response
 
     # following the signature of Session.request()
-    def request(self,
-                method,
-                url,
-                params=None,
-                data=None,
-                headers=None,
-                cookies=None,
-                files=None,
-                auth=None,
-                timeout=None,
-                allow_redirects=True,
-                proxies=None,
-                hooks=None,
-                stream=None,
-                verify=None,
-                cert=None,
-                json=None):
+    def request(
+        self,
+        method,
+        url,
+        params=None,
+        data=None,
+        headers=None,
+        cookies=None,
+        files=None,
+        auth=None,
+        timeout=None,
+        allow_redirects=True,
+        proxies=None,
+        hooks=None,
+        stream=None,
+        verify=None,
+        cert=None,
+        json=None,
+    ):
         request_body = data.read()
 
         if isinstance(request_body, str):
-            request_body = request_body.encode('utf-8') # to be able to compare with expected bytes
+            request_body = request_body.encode("utf-8")  # to be able to compare with expected bytes
 
         self._received_requests.append(request_body)
         if self._failure_count > 0:
@@ -438,9 +501,9 @@ def request(self,
         else:
             # fill response fields so that logging does not fail
             response = Response()
-            response._content = b''
+            response._content = b""
             response.status_code = 200
-            response.reason = 'OK'
+            response.reason = "OK"
             response.url = url
 
             response.request = PreparedRequest()
@@ -452,7 +515,7 @@ def request(self,
 
 
 @pytest.mark.parametrize(
-    'test_case',
+    "test_case",
     [
         # bytes -> BytesIO
         RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"),
@@ -465,11 +528,21 @@ def request(self,
         # StringIO
         RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"),
         # Non-seekable
-        RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True,
-                      b"0123456789")
-    ])
-@pytest.mark.parametrize('failure', [[MockSession.raise_timeout_exception, Timeout],
-                                     [MockSession.return_retryable_response, errors.TooManyRequests]])
+        RetryTestCase(
+            lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"),
+            None,
+            True,
+            b"0123456789",
+        ),
+    ],
+)
+@pytest.mark.parametrize(
+    "failure",
+    [
+        [MockSession.raise_timeout_exception, Timeout],
+        [MockSession.return_retryable_response, errors.TooManyRequests],
+    ],
+)
 def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]):
     failure_count = 2
 
@@ -480,7 +553,7 @@ def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callabl
     client._session = session
 
     def do():
-        client.do('POST', f'test.com/foo', data=data)
+        client.do("POST", f"test.com/foo", data=data)
 
     if test_case._expected_failure:
         expected_attempts_made = 1
diff --git a/tests/test_compute_mixins.py b/tests/test_compute_mixins.py
index bcfd528bf..ec895b022 100644
--- a/tests/test_compute_mixins.py
+++ b/tests/test_compute_mixins.py
@@ -3,12 +3,21 @@
 from databricks.sdk.mixins.compute import SemVer
 
 
-@pytest.mark.parametrize("given,expected",
-                         [('v0.0.4', SemVer(0, 0, 4)), ('v1.2.3', SemVer(1, 2, 3)),
-                          ('v12.1.x', SemVer(12, 1, 0)), ('v10.20.30', SemVer(10, 20, 30)),
-                          ('v1.1.2+meta', SemVer(1, 1, 2, build='meta')),
-                          ('v1.0.0-alpha', SemVer(1, 0, 0, pre_release='alpha')),
-                          ('8.x-snapshot-scala2.12', SemVer(8, 0, 0, pre_release='snapshot-scala2.12')), ])
+@pytest.mark.parametrize(
+    "given,expected",
+    [
+        ("v0.0.4", SemVer(0, 0, 4)),
+        ("v1.2.3", SemVer(1, 2, 3)),
+        ("v12.1.x", SemVer(12, 1, 0)),
+        ("v10.20.30", SemVer(10, 20, 30)),
+        ("v1.1.2+meta", SemVer(1, 1, 2, build="meta")),
+        ("v1.0.0-alpha", SemVer(1, 0, 0, pre_release="alpha")),
+        (
+            "8.x-snapshot-scala2.12",
+            SemVer(8, 0, 0, pre_release="snapshot-scala2.12"),
+        ),
+    ],
+)
 def test_parse_semver(given, expected):
     assert SemVer.parse(given) == expected
 
diff --git a/tests/test_config.py b/tests/test_config.py
index ebc8d683a..dc9d8e410 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -18,19 +18,28 @@
 
 
 def test_config_supports_legacy_credentials_provider():
-    c = Config(credentials_provider=noop_credentials, product='foo', product_version='1.2.3')
+    c = Config(
+        credentials_provider=noop_credentials,
+        product="foo",
+        product_version="1.2.3",
+    )
     c2 = c.copy()
-    assert c2._product_info == ('foo', '1.2.3')
-
-
-@pytest.mark.parametrize('host,expected', [("https://abc.def.ghi", "https://abc.def.ghi"),
-                                           ("https://abc.def.ghi/", "https://abc.def.ghi"),
-                                           ("abc.def.ghi", "https://abc.def.ghi"),
-                                           ("abc.def.ghi/", "https://abc.def.ghi"),
-                                           ("https://abc.def.ghi:443", "https://abc.def.ghi"),
-                                           ("abc.def.ghi:443", "https://abc.def.ghi")])
+    assert c2._product_info == ("foo", "1.2.3")
+
+
+@pytest.mark.parametrize(
+    "host,expected",
+    [
+        ("https://abc.def.ghi", "https://abc.def.ghi"),
+        ("https://abc.def.ghi/", "https://abc.def.ghi"),
+        ("abc.def.ghi", "https://abc.def.ghi"),
+        ("abc.def.ghi/", "https://abc.def.ghi"),
+        ("https://abc.def.ghi:443", "https://abc.def.ghi"),
+        ("abc.def.ghi:443", "https://abc.def.ghi"),
+    ],
+)
 def test_config_host_url_format_check(mocker, host, expected):
-    mocker.patch('databricks.sdk.config.Config.init_auth')
+    mocker.patch("databricks.sdk.config.Config.init_auth")
     assert Config(host=host).host == expected
 
 
@@ -40,35 +49,48 @@ class MockUname:
 
         @property
         def system(self):
-            return 'TestOS'
+            return "TestOS"
 
     # Clear all environment variables and cached CICD provider.
     for k in os.environ:
         monkeypatch.delenv(k, raising=False)
     useragent._cicd_provider = None
 
-    monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
-    monkeypatch.setattr(platform, 'uname', MockUname)
-    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
-    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM_VERSION', "0.0.1")
-    monkeypatch.setenv('DATABRICKS_RUNTIME_VERSION', "13.1 anything/else")
-
-    config = Config(host='http://localhost', username="something", password="something", product='test',
-                    product_version='0.0.0') \
-        .with_user_agent_extra('test-extra-1', '1') \
-        .with_user_agent_extra('test-extra-2', '2')
+    monkeypatch.setattr(platform, "python_version", lambda: "3.0.0")
+    monkeypatch.setattr(platform, "uname", MockUname)
+    monkeypatch.setenv("DATABRICKS_SDK_UPSTREAM", "upstream-product")
+    monkeypatch.setenv("DATABRICKS_SDK_UPSTREAM_VERSION", "0.0.1")
+    monkeypatch.setenv("DATABRICKS_RUNTIME_VERSION", "13.1 anything/else")
+
+    config = (
+        Config(
+            host="http://localhost",
+            username="something",
+            password="something",
+            product="test",
+            product_version="0.0.0",
+        )
+        .with_user_agent_extra("test-extra-1", "1")
+        .with_user_agent_extra("test-extra-2", "2")
+    )
 
     assert config.user_agent == (
         f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic"
         " test-extra-1/1 test-extra-2/2 upstream/upstream-product upstream-version/0.0.1"
-        " runtime/13.1-anything-else")
+        " runtime/13.1-anything-else"
+    )
 
-    with_product('some-product', '0.32.1')
-    config2 = Config(host='http://localhost', token='...')
-    assert config2.user_agent.startswith('some-product/0.32.1')
+    with_product("some-product", "0.32.1")
+    config2 = Config(host="http://localhost", token="...")
+    assert config2.user_agent.startswith("some-product/0.32.1")
 
-    config3 = Config(host='http://localhost', token='...', product='abc', product_version='1.2.3')
-    assert not config3.user_agent.startswith('some-product/0.32.1')
+    config3 = Config(
+        host="http://localhost",
+        token="...",
+        product="abc",
+        product_version="1.2.3",
+    )
+    assert not config3.user_agent.startswith("some-product/0.32.1")
 
 
 def test_config_copy_deep_copies_user_agent_other_info(config):
@@ -90,13 +112,17 @@ def test_config_copy_deep_copies_user_agent_other_info(config):
 
 
 def test_config_deep_copy(monkeypatch, mocker, tmp_path):
-    mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh',
-                 return_value=Token(access_token='token',
-                                    token_type='Bearer',
-                                    expiry=datetime(2023, 5, 22, 0, 0, 0)))
+    mocker.patch(
+        "databricks.sdk.credentials_provider.CliTokenSource.refresh",
+        return_value=Token(
+            access_token="token",
+            token_type="Bearer",
+            expiry=datetime(2023, 5, 22, 0, 0, 0),
+        ),
+    )
 
     write_large_dummy_executable(tmp_path)
-    monkeypatch.setenv('PATH', tmp_path.as_posix())
+    monkeypatch.setenv("PATH", tmp_path.as_posix())
 
     config = Config(host="https://abc123.azuredatabricks.net", auth_type="databricks-cli")
     config_copy = config.deep_copy()
@@ -104,11 +130,12 @@ def test_config_deep_copy(monkeypatch, mocker, tmp_path):
 
 
 def write_large_dummy_executable(path: pathlib.Path):
-    cli = path.joinpath('databricks')
+    cli = path.joinpath("databricks")
 
     # Generate a long random string to inflate the file size.
-    random_string = ''.join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
-    cli.write_text("""#!/bin/sh
+    random_string = "".join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
+    cli.write_text(
+        """#!/bin/sh
 cat <<EOF
     assert cli.stat().st_size >= (1024 * 1024)
     return cli
@@ -125,7 +154,7 @@ def write_large_dummy_executable(path: pathlib.Path):
 
 def test_load_azure_tenant_id_404(requests_mock, monkeypatch):
     set_az_path(monkeypatch)
-    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=404)
+    mock = requests_mock.get("https://abc123.azuredatabricks.net/aad/auth", status_code=404)
     cfg = Config(host="https://abc123.azuredatabricks.net")
     assert cfg.azure_tenant_id is None
     assert mock.called_once
@@ -133,7 +162,7 @@ def test_load_azure_tenant_id_404(requests_mock, monkeypatch):
 
 def test_load_azure_tenant_id_no_location_header(requests_mock, monkeypatch):
     set_az_path(monkeypatch)
-    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=302)
+    mock = requests_mock.get("https://abc123.azuredatabricks.net/aad/auth", status_code=302)
     cfg = Config(host="https://abc123.azuredatabricks.net")
     assert cfg.azure_tenant_id is None
     assert mock.called_once
@@ -141,9 +170,11 @@ def test_load_azure_tenant_id_no_location_header(requests_mock, monkeypatch):
 
 def test_load_azure_tenant_id_unparsable_location_header(requests_mock, monkeypatch):
     set_az_path(monkeypatch)
-    mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth',
-                             status_code=302,
-                             headers={'Location': 'https://unexpected-location'})
+    mock = requests_mock.get(
+        "https://abc123.azuredatabricks.net/aad/auth",
+        status_code=302,
+        headers={"Location": "https://unexpected-location"},
+    )
     cfg = Config(host="https://abc123.azuredatabricks.net")
     assert cfg.azure_tenant_id is None
     assert mock.called_once
@@ -152,9 +183,10 @@ def test_load_azure_tenant_id_unparsable_location_header(requests_mock, monkeypa
 def test_load_azure_tenant_id_happy_path(requests_mock, monkeypatch):
     set_az_path(monkeypatch)
     mock = requests_mock.get(
-        'https://abc123.azuredatabricks.net/aad/auth',
+        "https://abc123.azuredatabricks.net/aad/auth",
         status_code=302,
-        headers={'Location': 'https://login.microsoftonline.com/tenant-id/oauth2/authorize'})
+        headers={"Location": "https://login.microsoftonline.com/tenant-id/oauth2/authorize"},
+    )
     cfg = Config(host="https://abc123.azuredatabricks.net")
-    assert cfg.azure_tenant_id == 'tenant-id'
+    assert cfg.azure_tenant_id == "tenant-id"
     assert mock.called_once
diff --git a/tests/test_core.py b/tests/test_core.py
index 32431172b..7377bf2d7 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -27,12 +27,12 @@
 
 
 def test_parse_dsn():
-    cfg = Config.parse_dsn('databricks://user:pass@foo.databricks.com?retry_timeout_seconds=600')
+    cfg = Config.parse_dsn("databricks://user:pass@foo.databricks.com?retry_timeout_seconds=600")
 
     headers = cfg.authenticate()
 
-    assert headers['Authorization'] == 'Basic dXNlcjpwYXNz'
-    assert 'basic' == cfg.auth_type
+    assert headers["Authorization"] == "Basic dXNlcjpwYXNz"
+    assert "basic" == cfg.auth_type
 
 
 def test_databricks_cli_token_source_relative_path(config):
@@ -48,26 +48,30 @@ def test_databricks_cli_token_source_absolute_path(config):
 
 
 def test_databricks_cli_token_source_not_installed(config, monkeypatch):
-    monkeypatch.setenv('PATH', 'whatever')
+    monkeypatch.setenv("PATH", "whatever")
     with pytest.raises(FileNotFoundError, match="not installed"):
         DatabricksCliTokenSource(config)
 
 
-@pytest.mark.parametrize("date_string,expected",
-                         [("2023-12-01T15:19:48.007742617Z", datetime(2023, 12, 1, 15, 19, 48)),
-                          ("2023-12-05T15:59:01.40081+11:00", datetime(2023, 12, 5, 15, 59, 1)),
-                          ("2023-12-06 10:06:05", datetime(2023, 12, 6, 10, 6, 5))])
+@pytest.mark.parametrize(
+    "date_string,expected",
+    [
+        ("2023-12-01T15:19:48.007742617Z", datetime(2023, 12, 1, 15, 19, 48)),
+        ("2023-12-05T15:59:01.40081+11:00", datetime(2023, 12, 5, 15, 59, 1)),
+        ("2023-12-06 10:06:05", datetime(2023, 12, 6, 10, 6, 5)),
+    ],
+)
 def test_databricks_cli_token_parse_expiry(date_string, expected):
     assert CliTokenSource._parse_expiry(date_string) == expected
 
 
 def write_small_dummy_executable(path: pathlib.Path):
     if platform.system() == "Windows":
-        cli = path.joinpath('databricks.exe')
+        cli = path.joinpath("databricks.exe")
         cli.touch()
         cli.write_text('@echo off\necho "hello world"\n')
     else:
-        cli = path.joinpath('databricks')
+        cli = path.joinpath("databricks")
         cli.write_text('#!/bin/sh\necho "hello world"\n')
         cli.chmod(0o755)
     assert cli.stat().st_size < 1024
@@ -75,11 +79,12 @@ def write_small_dummy_executable(path: pathlib.Path):
 
 
 def write_large_dummy_executable(path: pathlib.Path):
-    cli = path.joinpath('databricks')
+    cli = path.joinpath("databricks")
 
     # Generate a long random string to inflate the file size.
-    random_string = ''.join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
-    cli.write_text("""#!/bin/sh
+    random_string = "".join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
+    cli.write_text(
+        """#!/bin/sh
 cat <<EOF
     assert cli.stat().st_size >= (1024 * 1024)
     return cli
@@ -96,25 +103,25 @@ def write_large_dummy_executable(path: pathlib.Path):
 
 def test_databricks_cli_token_source_installed_legacy(config, monkeypatch, tmp_path):
     write_small_dummy_executable(tmp_path)
-    monkeypatch.setenv('PATH', tmp_path.as_posix())
+    monkeypatch.setenv("PATH", tmp_path.as_posix())
     with pytest.raises(FileNotFoundError, match="version <0.100.0 detected"):
         DatabricksCliTokenSource(config)
 
 
 def test_databricks_cli_token_source_installed_legacy_with_symlink(config, monkeypatch, tmp_path):
-    dir1 = tmp_path.joinpath('dir1')
-    dir2 = tmp_path.joinpath('dir2')
+    dir1 = tmp_path.joinpath("dir1")
+    dir2 = tmp_path.joinpath("dir2")
     dir1.mkdir()
     dir2.mkdir()
 
-    if platform.system() == 'Windows':
+    if platform.system() == "Windows":
         (dir1 / "databricks.exe").symlink_to(write_small_dummy_executable(dir2))
     else:
         (dir1 / "databricks").symlink_to(write_small_dummy_executable(dir2))
 
     path = pathlib.Path(dir1)
     path = str(path)
-    monkeypatch.setenv('PATH', path)
+    monkeypatch.setenv("PATH", path)
 
     with pytest.raises(FileNotFoundError, match="version <0.100.0 detected"):
         DatabricksCliTokenSource(config)
@@ -122,13 +129,13 @@ def test_databricks_cli_token_source_installed_legacy_with_symlink(config, monke
 
 def test_databricks_cli_token_source_installed_new(config, monkeypatch, tmp_path):
     write_large_dummy_executable(tmp_path)
-    monkeypatch.setenv('PATH', tmp_path.as_posix())
+    monkeypatch.setenv("PATH", tmp_path.as_posix())
     DatabricksCliTokenSource(config)
 
 
 def test_databricks_cli_token_source_installed_both(config, monkeypatch, tmp_path):
-    dir1 = tmp_path.joinpath('dir1')
-    dir2 = tmp_path.joinpath('dir2')
+    dir1 = tmp_path.joinpath("dir1")
+    dir2 = tmp_path.joinpath("dir2")
     dir1.mkdir()
     dir2.mkdir()
 
@@ -136,35 +143,39 @@ def test_databricks_cli_token_source_installed_both(config, monkeypatch, tmp_pat
     write_large_dummy_executable(dir2)
 
     # Resolve small before large.
-    monkeypatch.setenv('PATH', str(os.pathsep).join([dir1.as_posix(), dir2.as_posix()]))
+    monkeypatch.setenv("PATH", str(os.pathsep).join([dir1.as_posix(), dir2.as_posix()]))
     DatabricksCliTokenSource(config)
 
     # Resolve large before small.
-    monkeypatch.setenv('PATH', str(os.pathsep).join([dir2.as_posix(), dir1.as_posix()]))
+    monkeypatch.setenv("PATH", str(os.pathsep).join([dir2.as_posix(), dir1.as_posix()]))
     DatabricksCliTokenSource(config)
 
 
 def test_databricks_cli_credential_provider_not_installed(config, monkeypatch):
-    monkeypatch.setenv('PATH', 'whatever')
+    monkeypatch.setenv("PATH", "whatever")
     assert databricks_cli(config) == None
 
 
 def test_databricks_cli_credential_provider_installed_legacy(config, monkeypatch, tmp_path):
     write_small_dummy_executable(tmp_path)
-    monkeypatch.setenv('PATH', tmp_path.as_posix())
+    monkeypatch.setenv("PATH", tmp_path.as_posix())
     assert databricks_cli(config) == None
 
 
 def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, tmp_path, mocker):
-    get_mock = mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh',
-                            return_value=Token(access_token='token',
-                                               token_type='Bearer',
-                                               expiry=datetime(2023, 5, 22, 0, 0, 0)))
+    get_mock = mocker.patch(
+        "databricks.sdk.credentials_provider.CliTokenSource.refresh",
+        return_value=Token(
+            access_token="token",
+            token_type="Bearer",
+            expiry=datetime(2023, 5, 22, 0, 0, 0),
+        ),
+    )
     write_large_dummy_executable(tmp_path)
-    path = str(os.pathsep).join([tmp_path.as_posix(), os.environ['PATH']])
+    path = str(os.pathsep).join([tmp_path.as_posix(), os.environ["PATH"]])
     path = pathlib.Path(path)
     path = str(path)
-    monkeypatch.setenv('PATH', path)
+    monkeypatch.setenv("PATH", path)
 
     assert databricks_cli(config) is not None
     assert get_mock.call_count == 1
@@ -176,27 +187,35 @@ class MockUname:
 
         @property
         def system(self):
-            return 'TestOS'
+            return "TestOS"
 
     # Clear all environment variables and cached CICD provider.
     for k in os.environ:
         monkeypatch.delenv(k, raising=False)
     useragent._cicd_provider = None
 
-    monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
-    monkeypatch.setattr(platform, 'uname', MockUname)
-    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
-    monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM_VERSION', "0.0.1")
-    monkeypatch.setenv('DATABRICKS_RUNTIME_VERSION', "13.1 anything/else")
-
-    config = Config(host='http://localhost', username="something", password="something", product='test',
-                    product_version='0.0.0') \
-        .with_user_agent_extra('test-extra-1', '1') \
-        .with_user_agent_extra('test-extra-2', '2')
+    monkeypatch.setattr(platform, "python_version", lambda: "3.0.0")
+    monkeypatch.setattr(platform, "uname", MockUname)
+    monkeypatch.setenv("DATABRICKS_SDK_UPSTREAM", "upstream-product")
+    monkeypatch.setenv("DATABRICKS_SDK_UPSTREAM_VERSION", "0.0.1")
+    monkeypatch.setenv("DATABRICKS_RUNTIME_VERSION", "13.1 anything/else")
+
+    config = (
+        Config(
+            host="http://localhost",
+            username="something",
+            password="something",
+            product="test",
+            product_version="0.0.0",
+        )
+        .with_user_agent_extra("test-extra-1", "1")
+        .with_user_agent_extra("test-extra-2", "2")
+    )
 
     assert config.user_agent == (
         f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic test-extra-1/1 test-extra-2/2"
-        " upstream/upstream-product upstream-version/0.0.1 runtime/13.1-anything-else")
+        " upstream/upstream-product upstream-version/0.0.1 runtime/13.1-anything-else"
+    )
 
 
 def test_config_copy_shallow_copies_credential_provider():
@@ -210,7 +229,7 @@ def __init__(self):
         def auth_type(self) -> str:
             return "test"
 
-        def __call__(self, cfg: 'Config') -> CredentialsProvider:
+        def __call__(self, cfg: "Config") -> CredentialsProvider:
             return lambda: {"token": self._token}
 
         def refresh(self):
@@ -253,45 +272,50 @@ class DatabricksConfig(Config):
         def __init__(self):
             super().__init__()
 
-    with pytest.raises(ValueError): # As opposed to `KeyError`.
+    with pytest.raises(ValueError):  # As opposed to `KeyError`.
         DatabricksConfig()
 
 
 def test_config_parsing_non_string_env_vars(monkeypatch):
-    monkeypatch.setenv('DATABRICKS_DEBUG_TRUNCATE_BYTES', '100')
-    c = Config(host='http://localhost', credentials_strategy=noop_credentials)
+    monkeypatch.setenv("DATABRICKS_DEBUG_TRUNCATE_BYTES", "100")
+    c = Config(host="http://localhost", credentials_strategy=noop_credentials)
     assert c.debug_truncate_bytes == 100
 
 
 def test_access_control_list(config, requests_mock):
-    requests_mock.post("http://localhost/api/2.1/jobs/create",
-                       request_headers={"User-Agent": config.user_agent})
+    requests_mock.post(
+        "http://localhost/api/2.1/jobs/create",
+        request_headers={"User-Agent": config.user_agent},
+    )
 
     w = WorkspaceClient(config=config)
     res = w.jobs.create(access_control_list=[AccessControlRequest(group_name="group")])
 
     assert requests_mock.call_count == 1
     assert requests_mock.called
-    assert requests_mock.last_request.json() == {'access_control_list': [{'group_name': 'group'}]}
+    assert requests_mock.last_request.json() == {"access_control_list": [{"group_name": "group"}]}
 
 
 def test_shares(config, requests_mock):
-    requests_mock.patch("http://localhost/api/2.1/unity-catalog/shares/jobId/permissions",
-                        request_headers={"User-Agent": config.user_agent})
+    requests_mock.patch(
+        "http://localhost/api/2.1/unity-catalog/shares/jobId/permissions",
+        request_headers={"User-Agent": config.user_agent},
+    )
 
     w = WorkspaceClient(config=config)
     res = w.shares.update_permissions(name="jobId", changes=[PermissionsChange(principal="principal")])
 
     assert requests_mock.call_count == 1
     assert requests_mock.called
-    assert requests_mock.last_request.json() == {'changes': [{'principal': 'principal'}]}
+    assert requests_mock.last_request.json() == {"changes": [{"principal": "principal"}]}
 
 
 def test_deletes(config, requests_mock):
-    requests_mock.delete("http://localhost/api/2.0/sql/alerts/alertId",
-                         request_headers={"User-Agent": config.user_agent},
-                         text="null",
-                         )
+    requests_mock.delete(
+        "http://localhost/api/2.0/sql/alerts/alertId",
+        request_headers={"User-Agent": config.user_agent},
+        text="null",
+    )
 
     w = WorkspaceClient(config=config)
     res = w.alerts.delete(id="alertId")
@@ -303,19 +327,34 @@ def test_deletes(config, requests_mock):
 
 
 @pytest.mark.parametrize(
-    'status_code,headers,body,expected_error',
-    [(401, {}, {
-        'error_code': 'UNAUTHORIZED',
-        'message': 'errorMessage',
-    },
-      errors.Unauthenticated('errorMessage. Config: host=http://localhost, auth_type=noop',
-                             error_code='UNAUTHORIZED')),
-     (403, {}, {
-         'error_code': 'FORBIDDEN',
-         'message': 'errorMessage',
-     },
-      errors.PermissionDenied('errorMessage. Config: host=http://localhost, auth_type=noop',
-                              error_code='FORBIDDEN')), ])
+    "status_code,headers,body,expected_error",
+    [
+        (
+            401,
+            {},
+            {
+                "error_code": "UNAUTHORIZED",
+                "message": "errorMessage",
+            },
+            errors.Unauthenticated(
+                "errorMessage. Config: host=http://localhost, auth_type=noop",
+                error_code="UNAUTHORIZED",
+            ),
+        ),
+        (
+            403,
+            {},
+            {
+                "error_code": "FORBIDDEN",
+                "message": "errorMessage",
+            },
+            errors.PermissionDenied(
+                "errorMessage. Config: host=http://localhost, auth_type=noop",
+                error_code="FORBIDDEN",
+            ),
+        ),
+    ],
+)
 def test_error(config, requests_mock, status_code, headers, body, expected_error):
     client = ApiClient(config)
     requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
@@ -326,7 +365,10 @@ def test_error(config, requests_mock, status_code, headers, body, expected_error
     assert str(actual) == str(expected_error)
     assert actual.error_code == expected_error.error_code
     assert actual.retry_after_secs == expected_error.retry_after_secs
-    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    expected_error_infos, actual_error_infos = (
+        expected_error.get_error_info(),
+        actual.get_error_info(),
+    )
     assert len(expected_error_infos) == len(actual_error_infos)
     for expected, actual in zip(expected_error_infos, actual_error_infos):
         assert expected.type == actual.type
@@ -338,59 +380,67 @@ def test_error(config, requests_mock, status_code, headers, body, expected_error
 def test_github_oidc_flow_works_with_azure(monkeypatch):
 
     def inner(h: BaseHTTPRequestHandler):
-        if 'audience=api://AzureADTokenExchange' in h.path:
-            auth = h.headers['Authorization']
-            assert 'Bearer gh-actions-token' == auth
+        if "audience=api://AzureADTokenExchange" in h.path:
+            auth = h.headers["Authorization"]
+            assert "Bearer gh-actions-token" == auth
             h.send_response(200)
             h.end_headers()
             h.wfile.write(b'{"value": "this_is_jwt_token"}')
             return
-        if '/oidc/oauth2/v2.0/authorize' == h.path:
+        if "/oidc/oauth2/v2.0/authorize" == h.path:
             h.send_response(301)
-            h.send_header('Location', f'http://{h.headers["Host"]}/mocked-tenant-id/irrelevant/part')
+            h.send_header(
+                "Location",
+                f'http://{h.headers["Host"]}/mocked-tenant-id/irrelevant/part',
+            )
             h.end_headers()
             return
-        if '/mocked-tenant-id/oauth2/token' == h.path:
+        if "/mocked-tenant-id/oauth2/token" == h.path:
             h.send_response(200)
             h.end_headers()
             h.wfile.write(b'{"expires_in": 100, "access_token": "this-is-it", "token_type": "Taker"}')
 
     with http_fixture_server(inner) as host:
-        monkeypatch.setenv('ACTIONS_ID_TOKEN_REQUEST_URL', f'{host}/oidc')
-        monkeypatch.setenv('ACTIONS_ID_TOKEN_REQUEST_TOKEN', 'gh-actions-token')
-        azure_environment = AzureEnvironment(name=host,
-                                             service_management_endpoint=host + '/',
-                                             resource_manager_endpoint=host + '/',
-                                             active_directory_endpoint=host + '/')
-        databricks_environment = DatabricksEnvironment(Cloud.AZURE,
-                                                       '...',
-                                                       azure_environment=azure_environment)
-        cfg = Config(host=host,
-                     azure_workspace_resource_id=...,
-                     azure_client_id='test',
-                     azure_environment=host,
-                     databricks_environment=databricks_environment)
+        monkeypatch.setenv("ACTIONS_ID_TOKEN_REQUEST_URL", f"{host}/oidc")
+        monkeypatch.setenv("ACTIONS_ID_TOKEN_REQUEST_TOKEN", "gh-actions-token")
+        azure_environment = AzureEnvironment(
+            name=host,
+            service_management_endpoint=host + "/",
+            resource_manager_endpoint=host + "/",
+            active_directory_endpoint=host + "/",
+        )
+        databricks_environment = DatabricksEnvironment(Cloud.AZURE, "...", azure_environment=azure_environment)
+        cfg = Config(
+            host=host,
+            azure_workspace_resource_id=...,
+            azure_client_id="test",
+            azure_environment=host,
+            databricks_environment=databricks_environment,
+        )
         headers = cfg.authenticate()
 
-        assert {'Authorization': 'Taker this-is-it'} == headers
+        assert {"Authorization": "Taker this-is-it"} == headers
 
 
 @pytest.mark.parametrize(
-    ['azure_environment', 'expected'],
+    ["azure_environment", "expected"],
     [
-        ('PUBLIC', ENVIRONMENTS['PUBLIC']),
-        ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']),
-        ('CHINA', ENVIRONMENTS['CHINA']),
-        ('public', ENVIRONMENTS['PUBLIC']),
-        ('usgovernment', ENVIRONMENTS['USGOVERNMENT']),
-        ('china', ENVIRONMENTS['CHINA']),
+        ("PUBLIC", ENVIRONMENTS["PUBLIC"]),
+        ("USGOVERNMENT", ENVIRONMENTS["USGOVERNMENT"]),
+        ("CHINA", ENVIRONMENTS["CHINA"]),
+        ("public", ENVIRONMENTS["PUBLIC"]),
+        ("usgovernment", ENVIRONMENTS["USGOVERNMENT"]),
+        ("china", ENVIRONMENTS["CHINA"]),
         # Kept for historical compatibility
-        ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']),
-        ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']),
-        ('AzureChinaCloud', ENVIRONMENTS['CHINA']),
-    ])
+        ("AzurePublicCloud", ENVIRONMENTS["PUBLIC"]),
+        ("AzureUSGovernment", ENVIRONMENTS["USGOVERNMENT"]),
+        ("AzureChinaCloud", ENVIRONMENTS["CHINA"]),
+    ],
+)
 def test_azure_environment(azure_environment, expected):
-    c = Config(credentials_strategy=noop_credentials,
-               azure_workspace_resource_id='...',
-               azure_environment=azure_environment)
+    c = Config(
+        credentials_strategy=noop_credentials,
+        azure_workspace_resource_id="...",
+        azure_environment=azure_environment,
+    )
     assert c.arm_environment == expected
diff --git a/tests/test_credentials_provider.py b/tests/test_credentials_provider.py
index 67e6f5b35..fb24d9dc4 100644
--- a/tests/test_credentials_provider.py
+++ b/tests/test_credentials_provider.py
@@ -8,16 +8,16 @@ def test_external_browser_refresh_success(mocker):
 
     # Mock Config.
     mock_cfg = Mock()
-    mock_cfg.auth_type = 'external-browser'
-    mock_cfg.host = 'test-host'
-    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
-    mock_cfg.client_id = 'test-client-id' # Or use azure_client_id
-    mock_cfg.client_secret = 'test-client-secret' # Or use azure_client_secret
+    mock_cfg.auth_type = "external-browser"
+    mock_cfg.host = "test-host"
+    mock_cfg.oidc_endpoints = {"token_endpoint": "test-token-endpoint"}
+    mock_cfg.client_id = "test-client-id"  # Or use azure_client_id
+    mock_cfg.client_secret = "test-client-secret"  # Or use azure_client_secret
 
     # Mock TokenCache.
     mock_token_cache = Mock()
     mock_session_credentials = Mock()
-    mock_session_credentials.token.return_value = "valid_token" # Simulate successful refresh
+    mock_session_credentials.token.return_value = "valid_token"  # Simulate successful refresh
     mock_token_cache.load.return_value = mock_session_credentials
 
     # Mock SessionCredentials.
@@ -25,12 +25,15 @@ def test_external_browser_refresh_success(mocker):
     mock_session_credentials.return_value = want_credentials_provider
 
     # Inject the mock implementations.
-    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch(
+        "databricks.sdk.credentials_provider.TokenCache",
+        return_value=mock_token_cache,
+    )
 
     got_credentials_provider = external_browser(mock_cfg)
 
     mock_token_cache.load.assert_called_once()
-    mock_session_credentials.token.assert_called_once() # Verify token refresh was attempted
+    mock_session_credentials.token.assert_called_once()  # Verify token refresh was attempted
     assert got_credentials_provider == want_credentials_provider
 
 
@@ -39,17 +42,16 @@ def test_external_browser_refresh_failure_new_oauth_flow(mocker):
 
     # Mock Config.
     mock_cfg = Mock()
-    mock_cfg.auth_type = 'external-browser'
-    mock_cfg.host = 'test-host'
-    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
-    mock_cfg.client_id = 'test-client-id'
-    mock_cfg.client_secret = 'test-client-secret'
+    mock_cfg.auth_type = "external-browser"
+    mock_cfg.host = "test-host"
+    mock_cfg.oidc_endpoints = {"token_endpoint": "test-token-endpoint"}
+    mock_cfg.client_id = "test-client-id"
+    mock_cfg.client_secret = "test-client-secret"
 
     # Mock TokenCache.
     mock_token_cache = Mock()
     mock_session_credentials = Mock()
-    mock_session_credentials.token.side_effect = Exception(
-        "Simulated refresh error") # Simulate a failed refresh
+    mock_session_credentials.token.side_effect = Exception("Simulated refresh error")  # Simulate a failed refresh
     mock_token_cache.load.return_value = mock_session_credentials
 
     # Mock SessionCredentials.
@@ -63,13 +65,19 @@ def test_external_browser_refresh_failure_new_oauth_flow(mocker):
     mock_oauth_client.initiate_consent.return_value = mock_consent
 
     # Inject the mock implementations.
-    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
-    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+    mocker.patch(
+        "databricks.sdk.credentials_provider.TokenCache",
+        return_value=mock_token_cache,
+    )
+    mocker.patch(
+        "databricks.sdk.credentials_provider.OAuthClient",
+        return_value=mock_oauth_client,
+    )
 
     got_credentials_provider = external_browser(mock_cfg)
 
     mock_token_cache.load.assert_called_once()
-    mock_session_credentials.token.assert_called_once() # Refresh attempt
+    mock_session_credentials.token.assert_called_once()  # Refresh attempt
     mock_oauth_client.initiate_consent.assert_called_once()
     mock_consent.launch_external_browser.assert_called_once()
     mock_token_cache.save.assert_called_once_with(mock_session_credentials)
@@ -81,15 +89,15 @@ def test_external_browser_no_cached_credentials(mocker):
 
     # Mock Config.
     mock_cfg = Mock()
-    mock_cfg.auth_type = 'external-browser'
-    mock_cfg.host = 'test-host'
-    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
-    mock_cfg.client_id = 'test-client-id'
-    mock_cfg.client_secret = 'test-client-secret'
+    mock_cfg.auth_type = "external-browser"
+    mock_cfg.host = "test-host"
+    mock_cfg.oidc_endpoints = {"token_endpoint": "test-token-endpoint"}
+    mock_cfg.client_id = "test-client-id"
+    mock_cfg.client_secret = "test-client-secret"
 
     # Mock TokenCache.
     mock_token_cache = Mock()
-    mock_token_cache.load.return_value = None # No cached credentials
+    mock_token_cache.load.return_value = None  # No cached credentials
 
     # Mock SessionCredentials.
     mock_session_credentials = Mock()
@@ -103,8 +111,14 @@ def test_external_browser_no_cached_credentials(mocker):
     mock_oauth_client.initiate_consent.return_value = mock_consent
 
     # Inject the mock implementations.
-    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
-    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+    mocker.patch(
+        "databricks.sdk.credentials_provider.TokenCache",
+        return_value=mock_token_cache,
+    )
+    mocker.patch(
+        "databricks.sdk.credentials_provider.OAuthClient",
+        return_value=mock_oauth_client,
+    )
 
     got_credentials_provider = external_browser(mock_cfg)
 
@@ -120,23 +134,29 @@ def test_external_browser_consent_fails(mocker):
 
     # Mock Config.
     mock_cfg = Mock()
-    mock_cfg.auth_type = 'external-browser'
-    mock_cfg.host = 'test-host'
-    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
-    mock_cfg.client_id = 'test-client-id'
-    mock_cfg.client_secret = 'test-client-secret'
+    mock_cfg.auth_type = "external-browser"
+    mock_cfg.host = "test-host"
+    mock_cfg.oidc_endpoints = {"token_endpoint": "test-token-endpoint"}
+    mock_cfg.client_id = "test-client-id"
+    mock_cfg.client_secret = "test-client-secret"
 
     # Mock TokenCache.
     mock_token_cache = Mock()
-    mock_token_cache.load.return_value = None # No cached credentials
+    mock_token_cache.load.return_value = None  # No cached credentials
 
     # Mock OAuthClient.
     mock_oauth_client = Mock()
-    mock_oauth_client.initiate_consent.return_value = None # Simulate consent failure
+    mock_oauth_client.initiate_consent.return_value = None  # Simulate consent failure
 
     # Inject the mock implementations.
-    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
-    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+    mocker.patch(
+        "databricks.sdk.credentials_provider.TokenCache",
+        return_value=mock_token_cache,
+    )
+    mocker.patch(
+        "databricks.sdk.credentials_provider.OAuthClient",
+        return_value=mock_oauth_client,
+    )
 
     got_credentials_provider = external_browser(mock_cfg)
 
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
index 1eac92382..d7721f014 100644
--- a/tests/test_data_plane.py
+++ b/tests/test_data_plane.py
@@ -6,7 +6,11 @@
 
 info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
 
-token = Token(access_token="token", token_type="type", expiry=datetime.now() + timedelta(hours=1))
+token = Token(
+    access_token="token",
+    token_type="type",
+    expiry=datetime.now() + timedelta(hours=1),
+)
 
 
 class MockRefresher:
@@ -25,18 +29,30 @@ def throw_exception():
 
 def test_not_cached():
     data_plane = DataPlaneService()
-    res = data_plane.get_data_plane_details("method", ["params"], lambda: info,
-                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    res = data_plane.get_data_plane_details(
+        "method",
+        ["params"],
+        lambda: info,
+        lambda a: MockRefresher(info.authorization_details).__call__(a),
+    )
     assert res.endpoint_url == info.endpoint_url
     assert res.token == token
 
 
 def test_token_expired():
-    expired = Token(access_token="expired", token_type="type", expiry=datetime.now() + timedelta(hours=-1))
+    expired = Token(
+        access_token="expired",
+        token_type="type",
+        expiry=datetime.now() + timedelta(hours=-1),
+    )
     data_plane = DataPlaneService()
     data_plane._tokens["method/params"] = expired
-    res = data_plane.get_data_plane_details("method", ["params"], lambda: info,
-                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    res = data_plane.get_data_plane_details(
+        "method",
+        ["params"],
+        lambda: info,
+        lambda a: MockRefresher(info.authorization_details).__call__(a),
+    )
     assert res.endpoint_url == info.endpoint_url
     assert res.token == token
 
@@ -44,8 +60,12 @@ def test_token_expired():
 def test_info_cached():
     data_plane = DataPlaneService()
     data_plane._data_plane_info["method/params"] = info
-    res = data_plane.get_data_plane_details("method", ["params"], throw_exception,
-                                            lambda a: MockRefresher(info.authorization_details).__call__(a))
+    res = data_plane.get_data_plane_details(
+        "method",
+        ["params"],
+        throw_exception,
+        lambda a: MockRefresher(info.authorization_details).__call__(a),
+    )
     assert res.endpoint_url == info.endpoint_url
     assert res.token == token
 
diff --git a/tests/test_dbfs_mixins.py b/tests/test_dbfs_mixins.py
index 6bbaca7a2..4332c8475 100644
--- a/tests/test_dbfs_mixins.py
+++ b/tests/test_dbfs_mixins.py
@@ -9,57 +9,70 @@ def test_moving_dbfs_file_to_local_dir(config, tmp_path, mocker):
     from databricks.sdk import WorkspaceClient
     from databricks.sdk.service.files import FileInfo, ReadResponse
 
-    get_status = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status',
-                              return_value=FileInfo(path='a', is_dir=False, file_size=4))
+    get_status = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        return_value=FileInfo(path="a", is_dir=False, file_size=4),
+    )
 
     def fake_read(path: str, *, length: int = None, offset: int = None):
-        assert path == 'a'
+        assert path == "a"
         assert length == 1048576
         if not offset:
-            return ReadResponse(bytes_read=4, data='aGVsbG8=')
+            return ReadResponse(bytes_read=4, data="aGVsbG8=")
         return ReadResponse(bytes_read=0)
 
-    mocker.patch('databricks.sdk.service.files.DbfsAPI.read', wraps=fake_read)
-    delete = mocker.patch('databricks.sdk.service.files.DbfsAPI.delete')
+    mocker.patch("databricks.sdk.service.files.DbfsAPI.read", wraps=fake_read)
+    delete = mocker.patch("databricks.sdk.service.files.DbfsAPI.delete")
 
     w = WorkspaceClient(config=config)
-    w.dbfs.move_('a', f'file:{tmp_path}', recursive=True)
+    w.dbfs.move_("a", f"file:{tmp_path}", recursive=True)
 
-    get_status.assert_called_with('a')
-    delete.assert_called_with('a', recursive=True)
+    get_status.assert_called_with("a")
+    delete.assert_called_with("a", recursive=True)
 
-    with (tmp_path / 'a').open('rb') as f:
-        assert f.read() == b'hello'
+    with (tmp_path / "a").open("rb") as f:
+        assert f.read() == b"hello"
 
 
 def test_moving_local_dir_to_dbfs(config, tmp_path, mocker):
     from databricks.sdk import WorkspaceClient
     from databricks.sdk.service.files import CreateResponse
 
-    with (tmp_path / 'a').open('wb') as f:
-        f.write(b'hello')
+    with (tmp_path / "a").open("wb") as f:
+        f.write(b"hello")
 
-    mocker.patch('databricks.sdk.service.files.DbfsAPI.create', return_value=CreateResponse(123))
+    mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.create",
+        return_value=CreateResponse(123),
+    )
 
-    get_status = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status', side_effect=NotFound())
-    add_block = mocker.patch('databricks.sdk.service.files.DbfsAPI.add_block')
-    close = mocker.patch('databricks.sdk.service.files.DbfsAPI.close')
+    get_status = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        side_effect=NotFound(),
+    )
+    add_block = mocker.patch("databricks.sdk.service.files.DbfsAPI.add_block")
+    close = mocker.patch("databricks.sdk.service.files.DbfsAPI.close")
 
     w = WorkspaceClient(config=config)
-    w.dbfs.move_(f'file:{tmp_path}', 'a', recursive=True)
+    w.dbfs.move_(f"file:{tmp_path}", "a", recursive=True)
 
-    get_status.assert_called_with('a')
+    get_status.assert_called_with("a")
     close.assert_called_with(123)
-    add_block.assert_called_with(123, 'aGVsbG8=')
-    assert not (tmp_path / 'a').exists()
-
-
-@pytest.mark.parametrize('path,expected_type', [('/path/to/file', _DbfsPath),
-                                                ('/Volumes/path/to/file', _VolumesPath),
-                                                ('dbfs:/path/to/file', _DbfsPath),
-                                                ('dbfs:/Volumes/path/to/file', _VolumesPath),
-                                                ('file:/path/to/file', _LocalPath),
-                                                ('file:/Volumes/path/to/file', _LocalPath), ])
+    add_block.assert_called_with(123, "aGVsbG8=")
+    assert not (tmp_path / "a").exists()
+
+
+@pytest.mark.parametrize(
+    "path,expected_type",
+    [
+        ("/path/to/file", _DbfsPath),
+        ("/Volumes/path/to/file", _VolumesPath),
+        ("dbfs:/path/to/file", _DbfsPath),
+        ("dbfs:/Volumes/path/to/file", _VolumesPath),
+        ("file:/path/to/file", _LocalPath),
+        ("file:/Volumes/path/to/file", _LocalPath),
+    ],
+)
 def test_fs_path(config, path, expected_type):
     dbfs_ext = DbfsExt(config)
     assert isinstance(dbfs_ext._path(path), expected_type)
@@ -68,7 +81,7 @@ def test_fs_path(config, path, expected_type):
 def test_fs_path_invalid(config):
     dbfs_ext = DbfsExt(config)
     with pytest.raises(ValueError) as e:
-        dbfs_ext._path('s3://path/to/file')
+        dbfs_ext._path("s3://path/to/file")
     assert 'unsupported scheme "s3"' in str(e.value)
 
 
@@ -76,27 +89,30 @@ def test_dbfs_local_path_mkdir(config, tmp_path):
     from databricks.sdk import WorkspaceClient
 
     w = WorkspaceClient(config=config)
-    w.dbfs._path(f'file:{tmp_path}/test_dir').mkdir()
-    assert w.dbfs.exists(f'file:{tmp_path}/test_dir')
+    w.dbfs._path(f"file:{tmp_path}/test_dir").mkdir()
+    assert w.dbfs.exists(f"file:{tmp_path}/test_dir")
 
 
 def test_dbfs_exists(config, mocker):
     from databricks.sdk import WorkspaceClient
 
-    get_status = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status', side_effect=NotFound())
+    get_status = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        side_effect=NotFound(),
+    )
 
     client = WorkspaceClient(config=config)
-    client.dbfs.exists('/abc/def/ghi')
+    client.dbfs.exists("/abc/def/ghi")
 
-    get_status.assert_called_with('/abc/def/ghi')
+    get_status.assert_called_with("/abc/def/ghi")
 
 
 def test_volume_exists(config, mocker):
     from databricks.sdk import WorkspaceClient
 
-    get_metadata = mocker.patch('databricks.sdk.service.files.FilesAPI.get_metadata')
+    get_metadata = mocker.patch("databricks.sdk.service.files.FilesAPI.get_metadata")
 
     client = WorkspaceClient(config=config)
-    client.dbfs.exists('/Volumes/abc/def/ghi')
+    client.dbfs.exists("/Volumes/abc/def/ghi")
 
-    get_metadata.assert_called_with('/Volumes/abc/def/ghi')
+    get_metadata.assert_called_with("/Volumes/abc/def/ghi")
diff --git a/tests/test_dbutils.py b/tests/test_dbutils.py
index 2900fdb77..9cd3d32fd 100644
--- a/tests/test_dbutils.py
+++ b/tests/test_dbutils.py
@@ -9,65 +9,74 @@
 @pytest.fixture
 def dbutils(config):
     from databricks.sdk.dbutils import RemoteDbUtils
+
     return RemoteDbUtils(config)
 
 
 def test_fs_cp(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.mixins.files.DbfsExt.copy')
+    inner = mocker.patch("databricks.sdk.mixins.files.DbfsExt.copy")
 
-    dbutils.fs.cp('a', 'b', recurse=True)
+    dbutils.fs.cp("a", "b", recurse=True)
 
-    inner.assert_called_with('a', 'b', recursive=True)
+    inner.assert_called_with("a", "b", recursive=True)
 
 
 def test_fs_head(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.read',
-                         return_value=ReadResponse(data='aGVsbG8=', bytes_read=5))
-    inner2 = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status',
-                          return_value=FileInfo(path='a', is_dir=False, file_size=5))
+    inner = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.read",
+        return_value=ReadResponse(data="aGVsbG8=", bytes_read=5),
+    )
+    inner2 = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        return_value=FileInfo(path="a", is_dir=False, file_size=5),
+    )
 
-    result = dbutils.fs.head('a')
+    result = dbutils.fs.head("a")
 
-    inner.assert_called_with('a', length=65536, offset=0)
-    inner2.assert_called_with('a')
-    assert result == 'hello'
+    inner.assert_called_with("a", length=65536, offset=0)
+    inner2.assert_called_with("a")
+    assert result == "hello"
 
 
 def test_fs_ls(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.list',
-                         return_value=[
-                             FileInfo(path='a/b', file_size=10, modification_time=20),
-                             FileInfo(path='a/c', file_size=30, modification_time=40),
-                         ])
-    inner2 = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status',
-                          side_effect=[
-                              FileInfo(path='a', is_dir=True, file_size=5),
-                              FileInfo(path='a/b', is_dir=False, file_size=5),
-                              FileInfo(path='a/c', is_dir=False, file_size=5),
-                          ])
-
-    result = dbutils.fs.ls('a')
-
-    inner.assert_called_with('a')
+    inner = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.list",
+        return_value=[
+            FileInfo(path="a/b", file_size=10, modification_time=20),
+            FileInfo(path="a/c", file_size=30, modification_time=40),
+        ],
+    )
+    inner2 = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        side_effect=[
+            FileInfo(path="a", is_dir=True, file_size=5),
+            FileInfo(path="a/b", is_dir=False, file_size=5),
+            FileInfo(path="a/c", is_dir=False, file_size=5),
+        ],
+    )
+
+    result = dbutils.fs.ls("a")
+
+    inner.assert_called_with("a")
     assert len(result) == 2
-    assert result[0] == DBUtilsFileInfo('a/b', 'b', 10, 20)
-    assert result[1] == DBUtilsFileInfo('a/c', 'c', 30, 40)
+    assert result[0] == DBUtilsFileInfo("a/b", "b", 10, 20)
+    assert result[1] == DBUtilsFileInfo("a/c", "c", 30, 40)
 
 
 def test_fs_mkdirs(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.mkdirs')
+    inner = mocker.patch("databricks.sdk.service.files.DbfsAPI.mkdirs")
 
-    dbutils.fs.mkdirs('a')
+    dbutils.fs.mkdirs("a")
 
-    inner.assert_called_with('a')
+    inner.assert_called_with("a")
 
 
 def test_fs_mv(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.mixins.files.DbfsExt.move_')
+    inner = mocker.patch("databricks.sdk.mixins.files.DbfsExt.move_")
 
-    dbutils.fs.mv('a', 'b')
+    dbutils.fs.mv("a", "b")
 
-    inner.assert_called_with('a', 'b', recursive=False, overwrite=True)
+    inner.assert_called_with("a", "b", recursive=False, overwrite=True)
 
 
 def test_fs_put(dbutils, mocker):
@@ -85,27 +94,29 @@ def write(self, contents):
             self._written = contents
 
     mock_open = _MockOpen()
-    inner = mocker.patch('databricks.sdk.mixins.files.DbfsExt.open', return_value=mock_open)
+    inner = mocker.patch("databricks.sdk.mixins.files.DbfsExt.open", return_value=mock_open)
 
-    dbutils.fs.put('a', 'b')
+    dbutils.fs.put("a", "b")
 
-    inner.assert_called_with('a', overwrite=False, write=True)
-    assert mock_open._written == b'b'
+    inner.assert_called_with("a", overwrite=False, write=True)
+    assert mock_open._written == b"b"
 
 
 def test_fs_rm(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.service.files.DbfsAPI.delete')
-    inner2 = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status',
-                          return_value=FileInfo(path='a', is_dir=False, file_size=5))
+    inner = mocker.patch("databricks.sdk.service.files.DbfsAPI.delete")
+    inner2 = mocker.patch(
+        "databricks.sdk.service.files.DbfsAPI.get_status",
+        return_value=FileInfo(path="a", is_dir=False, file_size=5),
+    )
 
-    dbutils.fs.rm('a')
+    dbutils.fs.rm("a")
 
-    inner.assert_called_with('a', recursive=False)
+    inner.assert_called_with("a", recursive=False)
 
 
-@raises('cluster_id is required in the configuration. Config: host=http://localhost, auth_type=noop')
+@raises("cluster_id is required in the configuration. Config: host=http://localhost, auth_type=noop")
 def test_fs_mount_without_cluster_fails(dbutils):
-    dbutils.fs.mount('s3://foo', 'bar')
+    dbutils.fs.mount("s3://foo", "bar")
 
 
 @pytest.fixture
@@ -119,130 +130,163 @@ def dbutils_proxy(mocker):
 
     from .conftest import noop_credentials
 
-    cluster_get = mocker.patch('databricks.sdk.service.compute.ClustersAPI.get',
-                               return_value=ClusterDetails(state=State.RUNNING))
-    context_create = mocker.patch('databricks.sdk.service.compute.CommandExecutionAPI.create',
-                                  return_value=Wait(lambda **kwargs: Created('y')))
+    cluster_get = mocker.patch(
+        "databricks.sdk.service.compute.ClustersAPI.get",
+        return_value=ClusterDetails(state=State.RUNNING),
+    )
+    context_create = mocker.patch(
+        "databricks.sdk.service.compute.CommandExecutionAPI.create",
+        return_value=Wait(lambda **kwargs: Created("y")),
+    )
 
     def inner(results_data: any, expect_command: str):
         import json
+
         command_execute = mocker.patch(
-            'databricks.sdk.service.compute.CommandExecutionAPI.execute',
-            return_value=Wait(lambda **kwargs: CommandStatusResponse(
-                results=Results(data=json.dumps(results_data)), status=CommandStatus.FINISHED)))
+            "databricks.sdk.service.compute.CommandExecutionAPI.execute",
+            return_value=Wait(
+                lambda **kwargs: CommandStatusResponse(
+                    results=Results(data=json.dumps(results_data)),
+                    status=CommandStatus.FINISHED,
+                )
+            ),
+        )
 
         def assertions():
-            cluster_get.assert_called_with('x')
-            context_create.assert_called_with(cluster_id='x', language=Language.PYTHON)
-            command_execute.assert_called_with(cluster_id='x',
-                                               context_id='y',
-                                               language=Language.PYTHON,
-                                               command=expect_command)
+            cluster_get.assert_called_with("x")
+            context_create.assert_called_with(cluster_id="x", language=Language.PYTHON)
+            command_execute.assert_called_with(
+                cluster_id="x",
+                context_id="y",
+                language=Language.PYTHON,
+                command=expect_command,
+            )
 
         dbutils = RemoteDbUtils(
-            Config(host='http://localhost', cluster_id='x', credentials_strategy=noop_credentials))
+            Config(
+                host="http://localhost",
+                cluster_id="x",
+                credentials_strategy=noop_credentials,
+            )
+        )
         return dbutils, assertions
 
     return inner
 
 
 def test_fs_mount(dbutils_proxy):
-    command = ('\n'
-               '        import json\n'
-               '        (args, kwargs) = json.loads(\'[["s3://foo", "bar"], {}]\')\n'
-               '        result = dbutils.fs.mount(*args, **kwargs)\n'
-               '        dbutils.notebook.exit(json.dumps(result))\n'
-               '        ')
+    command = (
+        "\n"
+        "        import json\n"
+        '        (args, kwargs) = json.loads(\'[["s3://foo", "bar"], {}]\')\n'
+        "        result = dbutils.fs.mount(*args, **kwargs)\n"
+        "        dbutils.notebook.exit(json.dumps(result))\n"
+        "        "
+    )
     dbutils, assertions = dbutils_proxy({}, command)
 
-    dbutils.fs.mount('s3://foo', 'bar')
+    dbutils.fs.mount("s3://foo", "bar")
 
     assertions()
 
 
 def test_fs_update_mount(dbutils_proxy):
-    command = ('\n'
-               '        import json\n'
-               '        (args, kwargs) = json.loads(\'[["s3://foo2", "bar"], {}]\')\n'
-               '        result = dbutils.fs.updateMount(*args, **kwargs)\n'
-               '        dbutils.notebook.exit(json.dumps(result))\n'
-               '        ')
+    command = (
+        "\n"
+        "        import json\n"
+        '        (args, kwargs) = json.loads(\'[["s3://foo2", "bar"], {}]\')\n'
+        "        result = dbutils.fs.updateMount(*args, **kwargs)\n"
+        "        dbutils.notebook.exit(json.dumps(result))\n"
+        "        "
+    )
     dbutils, assertions = dbutils_proxy({}, command)
 
-    dbutils.fs.updateMount('s3://foo2', 'bar')
+    dbutils.fs.updateMount("s3://foo2", "bar")
 
     assertions()
 
 
 def test_fs_mounts(dbutils_proxy):
-    command = ('\n'
-               '        import json\n'
-               "        (args, kwargs) = json.loads('[[], {}]')\n"
-               '        result = dbutils.fs.mounts(*args, **kwargs)\n'
-               '        dbutils.notebook.exit(json.dumps(result))\n'
-               '        ')
-    dbutils, assertions = dbutils_proxy([('a', 'b', 'c'), ('d', 'e', 'f'), ], command)
+    command = (
+        "\n"
+        "        import json\n"
+        "        (args, kwargs) = json.loads('[[], {}]')\n"
+        "        result = dbutils.fs.mounts(*args, **kwargs)\n"
+        "        dbutils.notebook.exit(json.dumps(result))\n"
+        "        "
+    )
+    dbutils, assertions = dbutils_proxy(
+        [
+            ("a", "b", "c"),
+            ("d", "e", "f"),
+        ],
+        command,
+    )
 
     mounts = dbutils.fs.mounts()
 
     assert len(mounts) == 2
-    assert mounts[0].mountPoint == 'a'
-    assert mounts[0].source == 'b'
+    assert mounts[0].mountPoint == "a"
+    assert mounts[0].source == "b"
 
     assertions()
 
 
 def test_any_proxy(dbutils_proxy):
-    command = ('\n'
-               '        import json\n'
-               '        (args, kwargs) = json.loads(\'[["a"], {}]\')\n'
-               '        result = dbutils.notebook.exit(*args, **kwargs)\n'
-               '        dbutils.notebook.exit(json.dumps(result))\n'
-               '        ')
-    dbutils, assertions = dbutils_proxy('a', command)
+    command = (
+        "\n"
+        "        import json\n"
+        "        (args, kwargs) = json.loads('[[\"a\"], {}]')\n"
+        "        result = dbutils.notebook.exit(*args, **kwargs)\n"
+        "        dbutils.notebook.exit(json.dumps(result))\n"
+        "        "
+    )
+    dbutils, assertions = dbutils_proxy("a", command)
 
     param = dbutils.notebook.exit("a")
 
-    assert param == 'a'
+    assert param == "a"
 
     assertions()
 
 
 def test_secrets_get_and_redacting_logs(dbutils, mocker):
-    inner = mocker.patch('databricks.sdk.core.ApiClient.do', return_value={'value': 'aGVsbG8='})
+    inner = mocker.patch("databricks.sdk.core.ApiClient.do", return_value={"value": "aGVsbG8="})
 
-    value = dbutils.secrets.get('foo', 'bar')
+    value = dbutils.secrets.get("foo", "bar")
 
-    inner.assert_called_with('GET', '/api/2.0/secrets/get', query={'key': 'bar', 'scope': 'foo'})
+    inner.assert_called_with("GET", "/api/2.0/secrets/get", query={"key": "bar", "scope": "foo"})
 
-    assert value == 'hello'
+    assert value == "hello"
 
 
 def test_jobs_task_values_set(dbutils):
-    dbutils.jobs.taskValues.set('key', 'value')
+    dbutils.jobs.taskValues.set("key", "value")
 
 
 def test_jobs_task_values_get(dbutils):
-    assert dbutils.jobs.taskValues.get('taskKey', 'key', debugValue='debug') == 'debug'
+    assert dbutils.jobs.taskValues.get("taskKey", "key", debugValue="debug") == "debug"
 
-    dbutils.jobs.taskValues.set('key', 'value')
+    dbutils.jobs.taskValues.set("key", "value")
 
    # Expect `get` to always return the `debugValue` when calling outside of a job context and not what was previously set using `set`
-    assert dbutils.jobs.taskValues.get('taskKey', 'key', debugValue='debug') == 'debug'
+    assert dbutils.jobs.taskValues.get("taskKey", "key", debugValue="debug") == "debug"
 
 
 def test_jobs_task_values_get_throws(dbutils):
     try:
-        dbutils.jobs.taskValues.get('taskKey', 'key')
+        dbutils.jobs.taskValues.get("taskKey", "key")
         assert False
     except TypeError as e:
-        assert str(
-            e) == 'Must pass debugValue when calling get outside of a job context. debugValue cannot be None.'
+        assert str(e) == "Must pass debugValue when calling get outside of a job context. debugValue cannot be None."
 
 
 def test_dbutils_proxy_overrides(dbutils, mocker, restorable_env):
     import os
+
     os.environ["DATABRICKS_SOURCE_FILE"] = "test_source_file"
-    mocker.patch('databricks.sdk.dbutils.RemoteDbUtils._cluster_id', return_value="test_cluster_id")
-    assert dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get(
-    ) == "test_source_file"
+    mocker.patch(
+        "databricks.sdk.dbutils.RemoteDbUtils._cluster_id",
+        return_value="test_cluster_id",
+    )
+    assert dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get() == "test_source_file"
diff --git a/tests/test_environments.py b/tests/test_environments.py
index c14426f0d..108a39790 100644
--- a/tests/test_environments.py
+++ b/tests/test_environments.py
@@ -15,5 +15,9 @@ def test_environment_azure():
 
 
 def test_default_environment_can_be_overridden():
-    c = Config(host="https://test.cloud.databricks.com", token="token", databricks_environment=ALL_ENVS[1])
+    c = Config(
+        host="https://test.cloud.databricks.com",
+        token="token",
+        databricks_environment=ALL_ENVS[1],
+    )
     assert c.environment == ALL_ENVS[1]
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 881f016f3..0e775bd50 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -8,128 +8,220 @@
 from databricks.sdk import errors
 
 
-def fake_response(method: str,
-                  status_code: int,
-                  response_body: str,
-                  path: Optional[str] = None) -> requests.Response:
-    return fake_raw_response(method, status_code, response_body.encode('utf-8'), path)
+def fake_response(
+    method: str,
+    status_code: int,
+    response_body: str,
+    path: Optional[str] = None,
+) -> requests.Response:
+    return fake_raw_response(method, status_code, response_body.encode("utf-8"), path)
 
 
-def fake_raw_response(method: str,
-                      status_code: int,
-                      response_body: bytes,
-                      path: Optional[str] = None) -> requests.Response:
+def fake_raw_response(
+    method: str,
+    status_code: int,
+    response_body: bytes,
+    path: Optional[str] = None,
+) -> requests.Response:
     resp = requests.Response()
     resp.status_code = status_code
-    resp.reason = http.client.responses.get(status_code, '')
+    resp.reason = http.client.responses.get(status_code, "")
     if path is None:
-        path = '/api/2.0/service'
+        path = "/api/2.0/service"
     resp.request = requests.Request(method, f"https://databricks.com{path}").prepare()
     resp._content = response_body
     return resp
 
 
-def fake_valid_response(method: str,
-                        status_code: int,
-                        error_code: str,
-                        message: str,
-                        path: Optional[str] = None) -> requests.Response:
-    body = {'message': message}
+def fake_valid_response(
+    method: str,
+    status_code: int,
+    error_code: str,
+    message: str,
+    path: Optional[str] = None,
+) -> requests.Response:
+    body = {"message": message}
     if error_code:
-        body['error_code'] = error_code
+        body["error_code"] = error_code
     return fake_response(method, status_code, json.dumps(body), path)
 
 
 def make_private_link_response() -> requests.Response:
     resp = requests.Response()
-    resp.url = 'https://databricks.com/login.html?error=private-link-validation-error'
-    resp.request = requests.Request('GET', 'https://databricks.com/api/2.0/service').prepare()
-    resp._content = b'{}'
+    resp.url = "https://databricks.com/login.html?error=private-link-validation-error"
+    resp.request = requests.Request("GET", "https://databricks.com/api/2.0/service").prepare()
+    resp._content = b"{}"
     resp.status_code = 200
     return resp
 
 
 # This should be `(int, str, type)` but doesn't work in Python 3.7-3.8.
-base_subclass_test_cases: List[Tuple[int, str,
-                                     type]] = [(400, '', errors.BadRequest),
-                                               (400, 'INVALID_PARAMETER_VALUE', errors.BadRequest),
-                                               (400, 'INVALID_PARAMETER_VALUE', errors.InvalidParameterValue),
-                                               (400, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
-                                               (400, '', IOError), (401, '', errors.Unauthenticated),
-                                               (401, '', IOError), (403, '', errors.PermissionDenied),
-                                               (403, '', IOError), (404, '', errors.NotFound),
-                                               (404, '', IOError), (409, '', errors.ResourceConflict),
-                                               (409, 'ABORTED', errors.Aborted),
-                                               (409, 'ABORTED', errors.ResourceConflict),
-                                               (409, 'ALREADY_EXISTS', errors.AlreadyExists),
-                                               (409, 'ALREADY_EXISTS', errors.ResourceConflict),
-                                               (409, '', IOError), (429, '', errors.TooManyRequests),
-                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
-                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.RequestLimitExceeded),
-                                               (429, 'RESOURCE_EXHAUSTED', errors.TooManyRequests),
-                                               (429, 'RESOURCE_EXHAUSTED', errors.ResourceExhausted),
-                                               (429, '', IOError), (499, '', errors.Cancelled),
-                                               (499, '', IOError), (500, '', errors.InternalError),
-                                               (500, 'UNKNOWN', errors.InternalError),
-                                               (500, 'UNKNOWN', errors.Unknown),
-                                               (500, 'DATA_LOSS', errors.InternalError),
-                                               (500, 'DATA_LOSS', errors.DataLoss), (500, '', IOError),
-                                               (501, '', errors.NotImplemented), (501, '', IOError),
-                                               (503, '', errors.TemporarilyUnavailable), (503, '', IOError),
-                                               (504, '', errors.DeadlineExceeded), (504, '', IOError),
-                                               (444, '', errors.DatabricksError), (444, '', IOError), ]
+base_subclass_test_cases: List[Tuple[int, str, type]] = [
+    (400, "", errors.BadRequest),
+    (400, "INVALID_PARAMETER_VALUE", errors.BadRequest),
+    (400, "INVALID_PARAMETER_VALUE", errors.InvalidParameterValue),
+    (400, "REQUEST_LIMIT_EXCEEDED", errors.TooManyRequests),
+    (400, "", IOError),
+    (401, "", errors.Unauthenticated),
+    (401, "", IOError),
+    (403, "", errors.PermissionDenied),
+    (403, "", IOError),
+    (404, "", errors.NotFound),
+    (404, "", IOError),
+    (409, "", errors.ResourceConflict),
+    (409, "ABORTED", errors.Aborted),
+    (409, "ABORTED", errors.ResourceConflict),
+    (409, "ALREADY_EXISTS", errors.AlreadyExists),
+    (409, "ALREADY_EXISTS", errors.ResourceConflict),
+    (409, "", IOError),
+    (429, "", errors.TooManyRequests),
+    (429, "REQUEST_LIMIT_EXCEEDED", errors.TooManyRequests),
+    (429, "REQUEST_LIMIT_EXCEEDED", errors.RequestLimitExceeded),
+    (429, "RESOURCE_EXHAUSTED", errors.TooManyRequests),
+    (429, "RESOURCE_EXHAUSTED", errors.ResourceExhausted),
+    (429, "", IOError),
+    (499, "", errors.Cancelled),
+    (499, "", IOError),
+    (500, "", errors.InternalError),
+    (500, "UNKNOWN", errors.InternalError),
+    (500, "UNKNOWN", errors.Unknown),
+    (500, "DATA_LOSS", errors.InternalError),
+    (500, "DATA_LOSS", errors.DataLoss),
+    (500, "", IOError),
+    (501, "", errors.NotImplemented),
+    (501, "", IOError),
+    (503, "", errors.TemporarilyUnavailable),
+    (503, "", IOError),
+    (504, "", errors.DeadlineExceeded),
+    (504, "", IOError),
+    (444, "", errors.DatabricksError),
+    (444, "", IOError),
+]
 
-subclass_test_cases = [(fake_valid_response('GET', x[0], x[1], 'nope'), x[2], 'nope')
-                       for x in base_subclass_test_cases]
+subclass_test_cases = [(fake_valid_response("GET", x[0], x[1], "nope"), x[2], "nope") for x in base_subclass_test_cases]
 
 
 @pytest.mark.parametrize(
-    'response, expected_error, expected_message', subclass_test_cases +
-    [(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
-     (fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
-     (fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
-     (make_private_link_response(), errors.PrivateLinkValidationError,
-      ('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
-       'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
-       'endpoint. For more information, see '
-       'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
-      ),
-     (fake_valid_response(
-         'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
-         '/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
-                          '/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
-                          '/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
-                          '/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
-     (fake_response(
-         'GET', 400,
-         'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
-     ), errors.BadRequest,
-      'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
-      ),
-     (fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
-      'Worker environment not ready'),
-     (fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
-      ('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
-       'Please report this issue with the following debugging information to the SDK issue tracker at '
-       'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
-       '< 400 Bad Request\n'
-       '< this is not a real response```')),
-     (fake_response(
-         'GET', 404,
-         json.dumps({
-             'detail': 'Group with id 1234 is not found',
-             'status': '404',
-             'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
-         })), errors.NotFound, 'None Group with id 1234 is not found'),
-     (fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
-      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
-      ),
-     (fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
-      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
-      )])
+    "response, expected_error, expected_message",
+    subclass_test_cases
+    + [
+        (fake_response("GET", 400, ""), errors.BadRequest, "Bad Request"),
+        (
+            fake_valid_response("GET", 417, "WHOOPS", "nope"),
+            errors.DatabricksError,
+            "nope",
+        ),
+        (
+            fake_valid_response("GET", 522, "", "nope"),
+            errors.DatabricksError,
+            "nope",
+        ),
+        (
+            make_private_link_response(),
+            errors.PrivateLinkValidationError,
+            (
+                "The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. "
+                "Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC "
+                "endpoint. For more information, see "
+                "https://docs.databricks.com/en/security/network/classic/privatelink.html."
+            ),
+        ),
+        (
+            fake_valid_response(
+                "GET",
+                400,
+                "INVALID_PARAMETER_VALUE",
+                "Cluster abcde does not exist",
+                "/api/2.0/clusters/get",
+            ),
+            errors.ResourceDoesNotExist,
+            "Cluster abcde does not exist",
+        ),
+        (
+            fake_valid_response(
+                "GET",
+                400,
+                "INVALID_PARAMETER_VALUE",
+                "Job abcde does not exist",
+                "/api/2.0/jobs/get",
+            ),
+            errors.ResourceDoesNotExist,
+            "Job abcde does not exist",
+        ),
+        (
+            fake_valid_response(
+                "GET",
+                400,
+                "INVALID_PARAMETER_VALUE",
+                "Job abcde does not exist",
+                "/api/2.1/jobs/get",
+            ),
+            errors.ResourceDoesNotExist,
+            "Job abcde does not exist",
+        ),
+        (
+            fake_valid_response(
+                "GET",
+                400,
+                "INVALID_PARAMETER_VALUE",
+                "Invalid spark version",
+                "/api/2.1/jobs/get",
+            ),
+            errors.InvalidParameterValue,
+            "Invalid spark version",
+        ),
+        (
+            fake_response(
+                "GET",
+                400,
+                "MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list",
+            ),
+            errors.BadRequest,
+            "vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list",
+        ),
+        (
+            fake_response("GET", 400, "<pre>Worker environment not ready</pre>
"), + errors.BadRequest, + "Worker environment not ready", + ), + ( + fake_response("GET", 400, "this is not a real response"), + errors.BadRequest, + ( + "unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. " + "Please report this issue with the following debugging information to the SDK issue tracker at " + "https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n" + "< 400 Bad Request\n" + "< this is not a real response```" + ), + ), + ( + fake_response( + "GET", + 404, + json.dumps( + { + "detail": "Group with id 1234 is not found", + "status": "404", + "schemas": ["urn:ietf:params:scim:api:messages:2.0:Error"], + } + ), + ), + errors.NotFound, + "None Group with id 1234 is not found", + ), + ( + fake_response("GET", 404, json.dumps("This is JSON but not a dictionary")), + errors.NotFound, + 'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```', + ), + ( + fake_raw_response("GET", 404, b"\x80"), + errors.NotFound, + "unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```", + ), + ], +) def test_get_api_error(response, expected_error, expected_message): parser = errors._Parser() with pytest.raises(errors.DatabricksError) as e: diff --git a/tests/test_files.py b/tests/test_files.py index f4d916f6f..50e6cb470 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -22,10 +22,17 @@ def __init__(self, offset: int): class DownloadTestCase: - def __init__(self, name: str, enable_new_client: bool, file_size: int, - failure_at_absolute_offset: List[int], max_recovers_total: Union[int, None], - max_recovers_without_progressing: Union[int, None], expected_success: bool, - expected_requested_offsets: List[int]): + def __init__( + self, + name: str, + enable_new_client: bool, + file_size: int, + failure_at_absolute_offset: List[int], + max_recovers_total: Union[int, None], + max_recovers_without_progressing: Union[int, None], + expected_success: bool, + expected_requested_offsets: List[int], + ): self.name = name self.enable_new_client = enable_new_client self.file_size = file_size @@ -53,17 +60,17 @@ def run(self, config: Config): response = w.files.download("/test").contents if self.expected_success: actual_content = response.read() - assert (len(actual_content) == len(session.content)) - assert (actual_content == session.content) + assert len(actual_content) == len(session.content) + assert actual_content == session.content else: with pytest.raises(RequestException): response.read() received_requests = session.received_requests - assert (len(self.expected_requested_offsets) == len(received_requests)) + assert len(self.expected_requested_offsets) == len(received_requests) for idx, requested_offset in enumerate(self.expected_requested_offsets): - assert (requested_offset == received_requests[idx]._offset) + assert requested_offset == received_requests[idx]._offset class MockSession: @@ -73,27 +80,29 @@ def __init__(self, test_case: DownloadTestCase): 
self.received_requests: List[RequestData] = [] self.content: bytes = os.urandom(self.test_case.file_size) self.failure_pointer = 0 - self.last_modified = 'Thu, 28 Nov 2024 16:39:14 GMT' + self.last_modified = "Thu, 28 Nov 2024 16:39:14 GMT" # following the signature of Session.request() - def request(self, - method, - url, - params=None, - data=None, - headers=None, - cookies=None, - files=None, - auth=None, - timeout=None, - allow_redirects=True, - proxies=None, - hooks=None, - stream=None, - verify=None, - cert=None, - json=None): - assert method == 'GET' + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + assert method == "GET" assert stream == True offset = 0 @@ -106,7 +115,7 @@ def request(self, raise Exception("Unexpected range header: " + range) if "If-Unmodified-Since" in headers: - assert (headers["If-Unmodified-Since"] == self.last_modified) + assert headers["If-Unmodified-Since"] == self.last_modified else: raise Exception("If-Unmodified-Since header should be passed along with Range") @@ -124,7 +133,7 @@ class MockRequest: def __init__(self, url: str): self.url = url - self.method = 'GET' + self.method = "GET" self.headers = dict() self.body = None @@ -136,11 +145,11 @@ def __init__(self, session: MockSession, offset: int, request: MockRequest): self.offset = offset self.request = request self.status_code = 200 - self.reason = 'OK' + self.reason = "OK" self.headers = dict() - self.headers['Content-Length'] = len(session.content) - offset - self.headers['Content-Type'] = 'application/octet-stream' - self.headers['Last-Modified'] = session.last_modified + self.headers["Content-Length"] = len(session.content) - offset + self.headers["Content-Type"] = "application/octet-stream" + self.headers["Last-Modified"] = session.last_modified self.ok = True self.url = request.url @@ -161,12 +170,12 @@ def __next__(self): if start_offset == len(self.response.session.content): raise StopIteration - end_offset = start_offset + self.chunk_size # exclusive, might be out of range + end_offset = start_offset + self.chunk_size # exclusive, might be out of range - if self.response.session.failure_pointer < len( - self.response.session.test_case.failure_at_absolute_offset): + if self.response.session.failure_pointer < len(self.response.session.test_case.failure_at_absolute_offset): failure_after_byte = self.response.session.test_case.failure_at_absolute_offset[ - self.response.session.failure_pointer] + self.response.session.failure_pointer + ] if failure_after_byte < end_offset: self.response.session.failure_pointer += 1 raise RequestException("Fake error") @@ -180,161 +189,206 @@ def close(self): class _Constants: - underlying_chunk_size = 1024 * 1024 # see ticket #832 + underlying_chunk_size = 1024 * 1024 # see ticket #832 @pytest.mark.parametrize( "test_case", [ - DownloadTestCase(name="Old client: no failures, file of 5 bytes", - enable_new_client=False, - file_size=5, - failure_at_absolute_offset=[], - max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=True, - expected_requested_offsets=[0]), - DownloadTestCase(name="Old client: no failures, file of 1.5 chunks", - enable_new_client=False, - file_size=int(1.5 * _Constants.underlying_chunk_size), - failure_at_absolute_offset=[], - max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=True, - 
expected_requested_offsets=[0]), + DownloadTestCase( + name="Old client: no failures, file of 5 bytes", + enable_new_client=False, + file_size=5, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="Old client: no failures, file of 1.5 chunks", + enable_new_client=False, + file_size=int(1.5 * _Constants.underlying_chunk_size), + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0], + ), DownloadTestCase( name="Old client: failure", enable_new_client=False, file_size=1024, failure_at_absolute_offset=[100], - max_recovers_total=None, # unlimited but ignored - max_recovers_without_progressing=None, # unlimited but ignored + max_recovers_total=None, # unlimited but ignored + max_recovers_without_progressing=None, # unlimited but ignored + expected_success=False, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="New client: no failures, file of 5 bytes", + enable_new_client=True, + file_size=5, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="New client: no failures, file of 1 Kb", + enable_new_client=True, + file_size=1024, + max_recovers_total=None, + max_recovers_without_progressing=None, + failure_at_absolute_offset=[], + expected_success=True, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="New client: no failures, file of 1.5 chunks", + enable_new_client=True, + file_size=int(1.5 * _Constants.underlying_chunk_size), + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="New client: no failures, file of 10 chunks", + enable_new_client=True, + file_size=10 * _Constants.underlying_chunk_size, + failure_at_absolute_offset=[], + max_recovers_total=0, + max_recovers_without_progressing=0, + expected_success=True, + expected_requested_offsets=[0], + ), + DownloadTestCase( + name="New client: recovers are disabled, first failure leads to download abort", + enable_new_client=True, + file_size=10000, + failure_at_absolute_offset=[5], + max_recovers_total=0, + max_recovers_without_progressing=0, expected_success=False, - expected_requested_offsets=[0]), - DownloadTestCase(name="New client: no failures, file of 5 bytes", - enable_new_client=True, - file_size=5, - failure_at_absolute_offset=[], - max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=True, - expected_requested_offsets=[0]), - DownloadTestCase(name="New client: no failures, file of 1 Kb", - enable_new_client=True, - file_size=1024, - max_recovers_total=None, - max_recovers_without_progressing=None, - failure_at_absolute_offset=[], - expected_success=True, - expected_requested_offsets=[0]), - DownloadTestCase(name="New client: no failures, file of 1.5 chunks", - enable_new_client=True, - file_size=int(1.5 * _Constants.underlying_chunk_size), - failure_at_absolute_offset=[], - max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=True, - expected_requested_offsets=[0]), - DownloadTestCase(name="New client: no failures, file of 10 chunks", - enable_new_client=True, - file_size=10 * _Constants.underlying_chunk_size, - failure_at_absolute_offset=[], - 
max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=True, - expected_requested_offsets=[0]), - DownloadTestCase(name="New client: recovers are disabled, first failure leads to download abort", - enable_new_client=True, - file_size=10000, - failure_at_absolute_offset=[5], - max_recovers_total=0, - max_recovers_without_progressing=0, - expected_success=False, - expected_requested_offsets=[0]), + expected_requested_offsets=[0], + ), DownloadTestCase( name="New client: unlimited recovers allowed", enable_new_client=True, file_size=_Constants.underlying_chunk_size * 5, # causes errors on requesting the third chunk failure_at_absolute_offset=[ - _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1, - _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size + 1, + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size + 1, _Constants.underlying_chunk_size * 3, ], max_recovers_total=None, max_recovers_without_progressing=None, expected_success=True, expected_requested_offsets=[ - 0, 0, 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 3 - ]), + 0, + 0, + 0, + 0, + _Constants.underlying_chunk_size, + _Constants.underlying_chunk_size * 3, + ], + ), DownloadTestCase( name="New client: we respect limit on total recovers when progressing", enable_new_client=True, file_size=_Constants.underlying_chunk_size * 10, failure_at_absolute_offset=[ 1, - _Constants.underlying_chunk_size + 1, # progressing - _Constants.underlying_chunk_size * 2 + 1, # progressing - _Constants.underlying_chunk_size * 3 + 1 # progressing + _Constants.underlying_chunk_size + 1, # progressing + _Constants.underlying_chunk_size * 2 + 1, # progressing + _Constants.underlying_chunk_size * 3 + 1, # progressing ], max_recovers_total=3, max_recovers_without_progressing=None, expected_success=False, expected_requested_offsets=[ - 0, 0, _Constants.underlying_chunk_size * 1, _Constants.underlying_chunk_size * 2 - ]), - DownloadTestCase(name="New client: we respect limit on total recovers when not progressing", - enable_new_client=True, - file_size=_Constants.underlying_chunk_size * 10, - failure_at_absolute_offset=[1, 1, 1, 1], - max_recovers_total=3, - max_recovers_without_progressing=None, - expected_success=False, - expected_requested_offsets=[0, 0, 0, 0]), - DownloadTestCase(name="New client: we respect limit on non-progressing recovers", - enable_new_client=True, - file_size=_Constants.underlying_chunk_size * 2, - failure_at_absolute_offset=[ - _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1, - _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1 - ], - max_recovers_total=None, - max_recovers_without_progressing=3, - expected_success=False, - expected_requested_offsets=[0, 0, 0, 0]), + 0, + 0, + _Constants.underlying_chunk_size * 1, + _Constants.underlying_chunk_size * 2, + ], + ), + DownloadTestCase( + name="New client: we respect limit on total recovers when not progressing", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[1, 1, 1, 1], + max_recovers_total=3, + max_recovers_without_progressing=None, + expected_success=False, + expected_requested_offsets=[0, 0, 0, 0], + ), + DownloadTestCase( + name="New client: we respect limit on non-progressing recovers", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 2, + 
failure_at_absolute_offset=[ + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, + _Constants.underlying_chunk_size - 1, + ], + max_recovers_total=None, + max_recovers_without_progressing=3, + expected_success=False, + expected_requested_offsets=[0, 0, 0, 0], + ), DownloadTestCase( name="New client: non-progressing recovers count is reset when progressing", enable_new_client=True, file_size=_Constants.underlying_chunk_size * 10, failure_at_absolute_offset=[ - _Constants.underlying_chunk_size + 1, # this recover is after progressing - _Constants.underlying_chunk_size + 1, # this is not - _Constants.underlying_chunk_size * 2 + 1, # this recover is after progressing - _Constants.underlying_chunk_size * 2 + 1, # this is not - _Constants.underlying_chunk_size * 2 + 1, # this is not, we abort here + _Constants.underlying_chunk_size + 1, # this recover is after progressing + _Constants.underlying_chunk_size + 1, # this is not + _Constants.underlying_chunk_size * 2 + 1, # this recover is after progressing + _Constants.underlying_chunk_size * 2 + 1, # this is not + _Constants.underlying_chunk_size * 2 + 1, # this is not, we abort here ], max_recovers_total=None, max_recovers_without_progressing=2, expected_success=False, expected_requested_offsets=[ - 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size, - _Constants.underlying_chunk_size * 2, _Constants.underlying_chunk_size * 2 - ]), - DownloadTestCase(name="New client: non-progressing recovers count is reset when progressing - 2", - enable_new_client=True, - file_size=_Constants.underlying_chunk_size * 10, - failure_at_absolute_offset=[ - 1, _Constants.underlying_chunk_size + 1, _Constants.underlying_chunk_size * 2 + - 1, _Constants.underlying_chunk_size * 3 + 1 - ], - max_recovers_total=None, - max_recovers_without_progressing=1, - expected_success=True, - expected_requested_offsets=[ - 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 2, - _Constants.underlying_chunk_size * 3 - ]), + 0, + _Constants.underlying_chunk_size, + _Constants.underlying_chunk_size, + _Constants.underlying_chunk_size * 2, + _Constants.underlying_chunk_size * 2, + ], + ), + DownloadTestCase( + name="New client: non-progressing recovers count is reset when progressing - 2", + enable_new_client=True, + file_size=_Constants.underlying_chunk_size * 10, + failure_at_absolute_offset=[ + 1, + _Constants.underlying_chunk_size + 1, + _Constants.underlying_chunk_size * 2 + 1, + _Constants.underlying_chunk_size * 3 + 1, + ], + max_recovers_total=None, + max_recovers_without_progressing=1, + expected_success=True, + expected_requested_offsets=[ + 0, + 0, + _Constants.underlying_chunk_size, + _Constants.underlying_chunk_size * 2, + _Constants.underlying_chunk_size * 3, + ], + ), ], - ids=DownloadTestCase.to_string) + ids=DownloadTestCase.to_string, +) def test_download_recover(config: Config, test_case: DownloadTestCase): test_case.run(config) diff --git a/tests/test_init_file.py b/tests/test_init_file.py index 8bf519e52..d9b084c7a 100644 --- a/tests/test_init_file.py +++ b/tests/test_init_file.py @@ -7,11 +7,11 @@ def test_init_file_contents(): Also see https://github.com/databricks/databricks-sdk-py/issues/343#issuecomment-1866029118. """ - with open('databricks/__init__.py') as f: + with open("databricks/__init__.py") as f: init_file_contents = f.read() # This hash is the expected hash of the contents of `src/databricks/__init__.py`. 
# It must not change, or else parallel package installation may lead to clobbered and invalid files. - expected_sha1 = '2772edbf52e517542acf8c039479c4b57b6ca2cd' - actual_sha1 = hashlib.sha1(init_file_contents.encode('utf-8')).hexdigest() + expected_sha1 = "2772edbf52e517542acf8c039479c4b57b6ca2cd" + actual_sha1 = hashlib.sha1(init_file_contents.encode("utf-8")).hexdigest() assert expected_sha1 == actual_sha1 diff --git a/tests/test_internal.py b/tests/test_internal.py index cce8e1af7..d432b5cb4 100644 --- a/tests/test_internal.py +++ b/tests/test_internal.py @@ -7,24 +7,24 @@ class A(Enum): - a = 'a' - b = 'b' + a = "a" + b = "b" def test_enum(): - assert _enum({'field': 'a'}, 'field', A) == A.a + assert _enum({"field": "a"}, "field", A) == A.a def test_enum_unknown(): - assert _enum({'field': 'c'}, 'field', A) is None + assert _enum({"field": "c"}, "field", A) is None def test_repeated_enum(): - assert _repeated_enum({'field': ['a', 'b']}, 'field', A) == [A.a, A.b] + assert _repeated_enum({"field": ["a", "b"]}, "field", A) == [A.a, A.b] def test_repeated_enum_unknown(): - assert _repeated_enum({'field': ['a', 'c']}, 'field', A) == [A.a] + assert _repeated_enum({"field": ["a", "c"]}, "field", A) == [A.a] @dataclass @@ -32,20 +32,23 @@ class B: field: str @classmethod - def from_dict(cls, d: dict) -> 'B': - return cls(d['field']) + def from_dict(cls, d: dict) -> "B": + return cls(d["field"]) def test_from_dict(): - assert _from_dict({'x': {'field': 'a'}}, 'x', B) == B('a') + assert _from_dict({"x": {"field": "a"}}, "x", B) == B("a") def test_repeated_dict(): - assert _repeated_dict({'x': [{'field': 'a'}, {'field': 'c'}]}, 'x', B) == [B('a'), B('c')] + assert _repeated_dict({"x": [{"field": "a"}, {"field": "c"}]}, "x", B) == [ + B("a"), + B("c"), + ] def test_escape_multi_segment_path_parameter(): - assert _escape_multi_segment_path_parameter('a b') == 'a%20b' - assert _escape_multi_segment_path_parameter('a/b') == 'a/b' - assert _escape_multi_segment_path_parameter('a?b') == 'a%3Fb' - assert _escape_multi_segment_path_parameter('a#b') == 'a%23b' + assert _escape_multi_segment_path_parameter("a b") == "a%20b" + assert _escape_multi_segment_path_parameter("a/b") == "a/b" + assert _escape_multi_segment_path_parameter("a?b") == "a%3Fb" + assert _escape_multi_segment_path_parameter("a#b") == "a%23b" diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 50143f193..5b8f5f182 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -3,13 +3,14 @@ # Test cases below are checking that we pinned API 2.1 for certain endpoints, DO NOT REMOVE OR CHANGE THEM. 
https://databricks.atlassian.net/browse/JOBS-19298 def test_jobs_create(config, requests_mock): - requests_mock.post("http://localhost/api/2.1/jobs/create", - request_headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - }, - text="null", - ) + requests_mock.post( + "http://localhost/api/2.1/jobs/create", + request_headers={ + "Accept": "application/json", + "Content-Type": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) w.jobs.create() @@ -19,13 +20,14 @@ def test_jobs_create(config, requests_mock): def test_jobs_update(config, requests_mock): - requests_mock.post("http://localhost/api/2.1/jobs/update", - request_headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - }, - text="null", - ) + requests_mock.post( + "http://localhost/api/2.1/jobs/update", + request_headers={ + "Accept": "application/json", + "Content-Type": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) w.jobs.update(job_id="job_id") @@ -35,12 +37,13 @@ def test_jobs_update(config, requests_mock): def test_jobs_list(config, requests_mock): - requests_mock.get("http://localhost/api/2.1/jobs/list", - request_headers={ - 'Accept': 'application/json', - }, - text="null", - ) + requests_mock.get( + "http://localhost/api/2.1/jobs/list", + request_headers={ + "Accept": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) for _ in w.jobs.list(): @@ -51,12 +54,13 @@ def test_jobs_list(config, requests_mock): def test_jobs_get(config, requests_mock): - requests_mock.get("http://localhost/api/2.1/jobs/get", - request_headers={ - 'Accept': 'application/json', - }, - text="null", - ) + requests_mock.get( + "http://localhost/api/2.1/jobs/get", + request_headers={ + "Accept": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) w.jobs.get(job_id="job_id") @@ -66,13 +70,14 @@ def test_jobs_get(config, requests_mock): def test_jobs_reset(config, requests_mock): - requests_mock.post("http://localhost/api/2.1/jobs/reset", - request_headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - }, - text="null", - ) + requests_mock.post( + "http://localhost/api/2.1/jobs/reset", + request_headers={ + "Accept": "application/json", + "Content-Type": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) w.jobs.reset(job_id="job_id", new_settings=None) @@ -82,12 +87,13 @@ def test_jobs_reset(config, requests_mock): def test_jobs_runs_list(config, requests_mock): - requests_mock.get("http://localhost/api/2.1/jobs/runs/list", - request_headers={ - 'Accept': 'application/json', - }, - text="null", - ) + requests_mock.get( + "http://localhost/api/2.1/jobs/runs/list", + request_headers={ + "Accept": "application/json", + }, + text="null", + ) w = WorkspaceClient(config=config) for _ in w.jobs.list_runs(job_id="job_id"): diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py index 25d852d8e..a9039146c 100644 --- a/tests/test_jobs_mixin.py +++ b/tests/test_jobs_mixin.py @@ -11,8 +11,7 @@ def make_getrun_path_pattern(run_id: int, page_token: Optional[str] = None) -> P rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?page_token={page_token}&run_id={run_id}")}' ) else: - return re.compile( - rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?run_id={run_id}")}') + return re.compile(rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?run_id={run_id}")}') def 
make_getjob_path_pattern(job_id: int, page_token: Optional[str] = None) -> Pattern[str]: @@ -21,8 +20,7 @@ def make_getjob_path_pattern(job_id: int, page_token: Optional[str] = None) -> P rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}&page_token={page_token}")}' ) else: - return re.compile( - rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}")}') + return re.compile(rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}")}') def make_listjobs_path_pattern(page_token: str) -> Pattern[str]: @@ -38,320 +36,313 @@ def make_listruns_path_pattern(page_token: str) -> Pattern[str]: def test_get_run_with_no_pagination(config, requests_mock): - run1 = {"tasks": [{"run_id": 0}, {"run_id": 1}], } + run1 = { + "tasks": [{"run_id": 0}, {"run_id": 1}], + } requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) w = WorkspaceClient(config=config) run = w.jobs.get_run(1337, page_token="initialToken") - assert run.as_dict() == {"tasks": [{'run_id': 0}, {'run_id': 1}], } + assert run.as_dict() == { + "tasks": [{"run_id": 0}, {"run_id": 1}], + } def test_get_run_pagination_with_tasks(config, requests_mock): from databricks.sdk.service import compute, jobs - cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12", - custom_tags={"ResourceClass": "SingleNode"}, - num_workers=0, - node_type_id="Standard_DS3_v2", - ) + + cluster_spec = compute.ClusterSpec( + spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ) cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec) cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec) cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec) cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec) run1 = { - "tasks": [{ - "run_id": 0 - }, { - "run_id": 1 - }], - "job_clusters": [cluster1.as_dict(), cluster2.as_dict(), ], - "job_parameters": [{ - "name": "param1", - "value": "value1" - }], + "tasks": [{"run_id": 0}, {"run_id": 1}], + "job_clusters": [ + cluster1.as_dict(), + cluster2.as_dict(), + ], + "job_parameters": [{"name": "param1", "value": "value1"}], "next_page_token": "tokenToSecondPage", } run2 = { - "tasks": [{ - "run_id": 2 - }, { - "run_id": 3 - }], - "job_clusters": [cluster3.as_dict(), cluster4.as_dict(), ], - "job_parameters": [{ - "name": "param2", - "value": "value2" - }], + "tasks": [{"run_id": 2}, {"run_id": 3}], + "job_clusters": [ + cluster3.as_dict(), + cluster4.as_dict(), + ], + "job_parameters": [{"name": "param2", "value": "value2"}], "next_page_token": "tokenToThirdPage", } run3 = {"tasks": [{"run_id": 4}]} requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) - requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2)) - requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3)) + requests_mock.get( + make_getrun_path_pattern(1337, "tokenToSecondPage"), + text=json.dumps(run2), + ) + requests_mock.get( + make_getrun_path_pattern(1337, "tokenToThirdPage"), + text=json.dumps(run3), + ) w = WorkspaceClient(config=config) run = w.jobs.get_run(1337, page_token="initialToken") assert run.as_dict() == { - "tasks": [{ - 'run_id': 0 - }, { - 'run_id': 1 - }, { - 'run_id': 2 - }, { - 'run_id': 3 - }, { - 'run_id': 4 - }], - "job_clusters": 
[cluster1.as_dict(), - cluster2.as_dict(), - cluster3.as_dict(), - cluster4.as_dict()], - "job_parameters": [{ - "name": "param1", - "value": "value1" - }, { - "name": "param2", - "value": "value2" - }], + "tasks": [ + {"run_id": 0}, + {"run_id": 1}, + {"run_id": 2}, + {"run_id": 3}, + {"run_id": 4}, + ], + "job_clusters": [ + cluster1.as_dict(), + cluster2.as_dict(), + cluster3.as_dict(), + cluster4.as_dict(), + ], + "job_parameters": [ + {"name": "param1", "value": "value1"}, + {"name": "param2", "value": "value2"}, + ], } def test_get_run_pagination_with_iterations(config, requests_mock): run1 = { - "tasks": [{ - "run_id": 1337 - }], - "iterations": [{ - "run_id": 0 - }, { - "run_id": 1 - }], + "tasks": [{"run_id": 1337}], + "iterations": [{"run_id": 0}, {"run_id": 1}], "next_page_token": "tokenToSecondPage", } run2 = { - "tasks": [{ - "run_id": 1337 - }], - "iterations": [{ - "run_id": 2 - }, { - "run_id": 3 - }], + "tasks": [{"run_id": 1337}], + "iterations": [{"run_id": 2}, {"run_id": 3}], "next_page_token": "tokenToThirdPage", } - run3 = {"tasks": [{"run_id": 1337}], "iterations": [{"run_id": 4}], } + run3 = { + "tasks": [{"run_id": 1337}], + "iterations": [{"run_id": 4}], + } requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1)) - requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2)) - requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3)) + requests_mock.get( + make_getrun_path_pattern(1337, "tokenToSecondPage"), + text=json.dumps(run2), + ) + requests_mock.get( + make_getrun_path_pattern(1337, "tokenToThirdPage"), + text=json.dumps(run3), + ) w = WorkspaceClient(config=config) run = w.jobs.get_run(1337, page_token="initialToken") assert run.as_dict() == { - "tasks": [{ - 'run_id': 1337 - }], - "iterations": [{ - 'run_id': 0 - }, { - 'run_id': 1 - }, { - 'run_id': 2 - }, { - 'run_id': 3 - }, { - 'run_id': 4 - }], + "tasks": [{"run_id": 1337}], + "iterations": [ + {"run_id": 0}, + {"run_id": 1}, + {"run_id": 2}, + {"run_id": 3}, + {"run_id": 4}, + ], } def test_get_job_with_no_pagination(config, requests_mock): - job1 = {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }} + job1 = { + "settings": { + "tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], + } + } requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1)) w = WorkspaceClient(config=config) job = w.jobs.get(1337, page_token="initialToken") - assert job.as_dict() == {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }} + assert job.as_dict() == { + "settings": { + "tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], + } + } def test_get_job_pagination_with_tasks(config, requests_mock): from databricks.sdk.service import compute, jobs - cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12", - custom_tags={"ResourceClass": "SingleNode"}, - num_workers=0, - node_type_id="Standard_DS3_v2", - ) + + cluster_spec = compute.ClusterSpec( + spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ) cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec) cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec) cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec) cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec) job1 = { 
"settings": { - "tasks": [{ - "task_key": "taskKey1" - }, { - "task_key": "taskKey2" - }], + "tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], - "parameters": [{ - "name": "param1", - "default": "default1" - }], - "environments": [{ - "environment_key": "key1" - }, { - "environment_key": "key2" - }] + "parameters": [{"name": "param1", "default": "default1"}], + "environments": [ + {"environment_key": "key1"}, + {"environment_key": "key2"}, + ], }, - "next_page_token": "tokenToSecondPage" + "next_page_token": "tokenToSecondPage", } job2 = { "settings": { - "tasks": [{ - "task_key": "taskKey3" - }, { - "task_key": "taskKey4" - }], + "tasks": [{"task_key": "taskKey3"}, {"task_key": "taskKey4"}], "job_clusters": [cluster3.as_dict(), cluster4.as_dict()], - "parameters": [{ - "name": "param2", - "default": "default2" - }], - "environments": [{ - "environment_key": "key3" - }] + "parameters": [{"name": "param2", "default": "default2"}], + "environments": [{"environment_key": "key3"}], }, - "next_page_token": "tokenToThirdPage" + "next_page_token": "tokenToThirdPage", } job3 = { "settings": { - "tasks": [{ - "task_key": "taskKey5" - }], - "parameters": [{ - "name": "param3", - "default": "default3" - }] + "tasks": [{"task_key": "taskKey5"}], + "parameters": [{"name": "param3", "default": "default3"}], }, } requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1)) - requests_mock.get(make_getjob_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(job2)) - requests_mock.get(make_getjob_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(job3)) + requests_mock.get( + make_getjob_path_pattern(1337, "tokenToSecondPage"), + text=json.dumps(job2), + ) + requests_mock.get( + make_getjob_path_pattern(1337, "tokenToThirdPage"), + text=json.dumps(job3), + ) w = WorkspaceClient(config=config) job = w.jobs.get(1337, page_token="initialToken") assert job.as_dict() == { "settings": { - "tasks": [{ - "task_key": "taskKey1" - }, { - "task_key": "taskKey2" - }, { - "task_key": "taskKey3" - }, { - "task_key": "taskKey4" - }, { - "task_key": "taskKey5" - }], - "job_clusters": [cluster1.as_dict(), - cluster2.as_dict(), - cluster3.as_dict(), - cluster4.as_dict()], - "parameters": [{ - "name": "param1", - "default": "default1" - }, { - "name": "param2", - "default": "default2" - }, { - "name": "param3", - "default": "default3" - }], - "environments": [{ - "environment_key": "key1" - }, { - "environment_key": "key2" - }, { - "environment_key": "key3" - }] + "tasks": [ + {"task_key": "taskKey1"}, + {"task_key": "taskKey2"}, + {"task_key": "taskKey3"}, + {"task_key": "taskKey4"}, + {"task_key": "taskKey5"}, + ], + "job_clusters": [ + cluster1.as_dict(), + cluster2.as_dict(), + cluster3.as_dict(), + cluster4.as_dict(), + ], + "parameters": [ + {"name": "param1", "default": "default1"}, + {"name": "param2", "default": "default2"}, + {"name": "param3", "default": "default3"}, + ], + "environments": [ + {"environment_key": "key1"}, + {"environment_key": "key2"}, + {"environment_key": "key3"}, + ], } } def test_list_jobs_without_task_expansion(config, requests_mock): listjobs_page1 = { - "jobs": [{ + "jobs": [ + { + "job_id": 100, + "settings": { + "name": "job100", + }, + }, + { + "job_id": 200, + "settings": { + "name": "job200", + }, + }, + { + "job_id": 300, + "settings": { + "name": "job300", + }, + }, + ], + "next_page_token": "tokenToSecondPage", + } + listjobs_page2 = { + "jobs": [ + { + "job_id": 400, + 
"settings": { + "name": "job400", + }, + }, + { + "job_id": 500, + "settings": { + "name": "job500", + }, + }, + ] + } + + requests_mock.get( + make_listjobs_path_pattern("initialToken"), + text=json.dumps(listjobs_page1), + ) + requests_mock.get( + make_listjobs_path_pattern("tokenToSecondPage"), + text=json.dumps(listjobs_page2), + ) + w = WorkspaceClient(config=config) + + # Converts the iterator to a list in order to compare the results + jobs_list = list(w.jobs.list(expand_tasks=False, page_token="initialToken")) + jobs_dict = [job.as_dict() for job in jobs_list] + + assert jobs_dict == [ + { "job_id": 100, "settings": { "name": "job100", }, - }, { + }, + { "job_id": 200, "settings": { "name": "job200", - } - }, { + }, + }, + { "job_id": 300, "settings": { "name": "job300", - } - }], - "next_page_token": - "tokenToSecondPage" - } - listjobs_page2 = { - "jobs": [{ + }, + }, + { "job_id": 400, "settings": { "name": "job400", - } - }, { + }, + }, + { "job_id": 500, "settings": { "name": "job500", - } - }] - } - - requests_mock.get(make_listjobs_path_pattern("initialToken"), text=json.dumps(listjobs_page1)) - requests_mock.get(make_listjobs_path_pattern("tokenToSecondPage"), text=json.dumps(listjobs_page2)) - w = WorkspaceClient(config=config) - - # Converts the iterator to a list in order to compare the results - jobs_list = list(w.jobs.list(expand_tasks=False, page_token="initialToken")) - jobs_dict = [job.as_dict() for job in jobs_list] - - assert jobs_dict == [{ - "job_id": 100, - "settings": { - "name": "job100", - } - }, { - "job_id": 200, - "settings": { - "name": "job200", - } - }, { - "job_id": 300, - "settings": { - "name": "job300", - } - }, { - "job_id": 400, - "settings": { - "name": "job400", - } - }, { - "job_id": 500, - "settings": { - "name": "job500", - } - }] + }, + }, + ] # only two requests should be made which are jobs/list requests assert requests_mock.call_count == 2 @@ -359,346 +350,318 @@ def test_list_jobs_without_task_expansion(config, requests_mock): def test_list_jobs_with_many_tasks(config, requests_mock): from databricks.sdk.service import compute, jobs - cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12", - custom_tags={"ResourceClass": "SingleNode"}, - num_workers=0, - node_type_id="Standard_DS3_v2", - ) + + cluster_spec = compute.ClusterSpec( + spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ) cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec) cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec) cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec) cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec) listjobs_page1 = { - "jobs": [{ - "job_id": 100, - "settings": { - "tasks": [{ - "task_key": "taskkey105" - }, { - "task_key": "taskkey103" - }], - "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], - "parameters": [{ - "name": "param1", - "default": "default1" - }], - "environments": [{ - "environment_key": "key1" - }, { - "environment_key": "key2" - }] + "jobs": [ + { + "job_id": 100, + "settings": { + "tasks": [ + {"task_key": "taskkey105"}, + {"task_key": "taskkey103"}, + ], + "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], + "parameters": [{"name": "param1", "default": "default1"}], + "environments": [ + {"environment_key": "key1"}, + {"environment_key": "key2"}, + ], + }, + "has_more": True, }, - "has_more": True - }, { - 
"job_id": 200, - "settings": { - "tasks": [{ - "task_key": "taskkey201" - }, { - "task_key": "taskkey202" - }] + { + "job_id": 200, + "settings": { + "tasks": [ + {"task_key": "taskkey201"}, + {"task_key": "taskkey202"}, + ] + }, + "has_more": True, }, - "has_more": True - }, { - "job_id": 300, - "settings": { - "tasks": [{ - "task_key": "taskkey301" - }] - } - }], - "next_page_token": - "tokenToSecondPage" + { + "job_id": 300, + "settings": {"tasks": [{"task_key": "taskkey301"}]}, + }, + ], + "next_page_token": "tokenToSecondPage", } listjobs_page2 = { - "jobs": [{ - "job_id": 400, - "settings": { - "tasks": [{ - "task_key": "taskkey401" - }, { - "task_key": "taskkey402" - }], - "job_clusters": [cluster1.as_dict()], - }, - "has_more": True - }] + "jobs": [ + { + "job_id": 400, + "settings": { + "tasks": [ + {"task_key": "taskkey401"}, + {"task_key": "taskkey402"}, + ], + "job_clusters": [cluster1.as_dict()], + }, + "has_more": True, + } + ] } getjob_100_page1 = { "job_id": 100, "settings": { - "tasks": [{ - "task_key": "taskkey101" - }, { - "task_key": "taskkey102" - }], + "tasks": [{"task_key": "taskkey101"}, {"task_key": "taskkey102"}], "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], - "parameters": [{ - "name": "param1", - "default": "default1" - }], - "environments": [{ - "environment_key": "key1" - }, { - "environment_key": "key2" - }] + "parameters": [{"name": "param1", "default": "default1"}], + "environments": [ + {"environment_key": "key1"}, + {"environment_key": "key2"}, + ], }, - "next_page_token": "tokenToSecondPage_100" + "next_page_token": "tokenToSecondPage_100", } getjob_100_page2 = { "job_id": 100, "settings": { - "tasks": [{ - "task_key": "taskkey103" - }, { - "task_key": "taskkey104" - }], + "tasks": [{"task_key": "taskkey103"}, {"task_key": "taskkey104"}], "job_clusters": [cluster3.as_dict(), cluster4.as_dict()], - "parameters": [{ - "name": "param2", - "default": "default2" - }], - "environments": [{ - "environment_key": "key3" - }, { - "environment_key": "key4" - }] + "parameters": [{"name": "param2", "default": "default2"}], + "environments": [ + {"environment_key": "key3"}, + {"environment_key": "key4"}, + ], }, - "next_page_token": "tokenToThirdPage_100" + "next_page_token": "tokenToThirdPage_100", } getjob_100_page3 = { "job_id": 100, "settings": { - "tasks": [{ - "task_key": "taskkey105" - }], - "environments": [{ - "environment_key": "key5" - }] - } + "tasks": [{"task_key": "taskkey105"}], + "environments": [{"environment_key": "key5"}], + }, } getjob_200_page1 = { "job_id": 200, - "settings": { - "tasks": [{ - "task_key": "taskkey201" - }, { - "task_key": "taskkey202" - }] - }, - "next_page_token": "tokenToSecondPage_200" + "settings": {"tasks": [{"task_key": "taskkey201"}, {"task_key": "taskkey202"}]}, + "next_page_token": "tokenToSecondPage_200", } getjob_200_page2 = { "job_id": 200, - "settings": { - "tasks": [{ - "task_key": "taskkey203" - }, { - "task_key": "taskkey204" - }] - } + "settings": {"tasks": [{"task_key": "taskkey203"}, {"task_key": "taskkey204"}]}, + } + getjob_300_page1 = { + "job_id": 300, + "settings": {"tasks": [{"task_key": "taskkey301"}]}, } - getjob_300_page1 = {"job_id": 300, "settings": {"tasks": [{"task_key": "taskkey301"}]}} getjob_400_page1 = { "job_id": 400, "settings": { "tasks": [ + {"task_key": "taskkey401"}, { - "task_key": "taskkey401" + "task_key": "taskkey403" # jobs/get returns tasks in different order. 
jobs/get order is the ground truth }, - { - "task_key": - "taskkey403" # jobs/get returns tasks in different order. jobs/get order is the ground truth - } ], - "job_clusters": [cluster1.as_dict()] + "job_clusters": [cluster1.as_dict()], }, - "next_page_token": "tokenToSecondPage_400" + "next_page_token": "tokenToSecondPage_400", } getjob_400_page2 = { "job_id": 400, "settings": { - "tasks": [{ - "task_key": "taskkey402" - }], - "job_clusters": [cluster2.as_dict()] - } + "tasks": [{"task_key": "taskkey402"}], + "job_clusters": [cluster2.as_dict()], + }, } - requests_mock.get(make_listjobs_path_pattern("initialToken"), text=json.dumps(listjobs_page1)) - requests_mock.get(make_listjobs_path_pattern("tokenToSecondPage"), text=json.dumps(listjobs_page2)) + requests_mock.get( + make_listjobs_path_pattern("initialToken"), + text=json.dumps(listjobs_page1), + ) + requests_mock.get( + make_listjobs_path_pattern("tokenToSecondPage"), + text=json.dumps(listjobs_page2), + ) requests_mock.get(make_getjob_path_pattern(100), text=json.dumps(getjob_100_page1)) - requests_mock.get(make_getjob_path_pattern(100, "tokenToSecondPage_100"), - text=json.dumps(getjob_100_page2)) - requests_mock.get(make_getjob_path_pattern(100, "tokenToThirdPage_100"), - text=json.dumps(getjob_100_page3)) + requests_mock.get( + make_getjob_path_pattern(100, "tokenToSecondPage_100"), + text=json.dumps(getjob_100_page2), + ) + requests_mock.get( + make_getjob_path_pattern(100, "tokenToThirdPage_100"), + text=json.dumps(getjob_100_page3), + ) requests_mock.get(make_getjob_path_pattern(200), text=json.dumps(getjob_200_page1)) - requests_mock.get(make_getjob_path_pattern(200, "tokenToSecondPage_200"), - text=json.dumps(getjob_200_page2)) + requests_mock.get( + make_getjob_path_pattern(200, "tokenToSecondPage_200"), + text=json.dumps(getjob_200_page2), + ) requests_mock.get(make_getjob_path_pattern(300), text=json.dumps(getjob_300_page1)) requests_mock.get(make_getjob_path_pattern(400), text=json.dumps(getjob_400_page1)) - requests_mock.get(make_getjob_path_pattern(400, "tokenToSecondPage_400"), - text=json.dumps(getjob_400_page2)) + requests_mock.get( + make_getjob_path_pattern(400, "tokenToSecondPage_400"), + text=json.dumps(getjob_400_page2), + ) w = WorkspaceClient(config=config) # Converts the iterator to a list in order to compare the results jobs_list = list(w.jobs.list(expand_tasks=True, page_token="initialToken")) jobs_dict = [job.as_dict() for job in jobs_list] - assert jobs_dict == [{ - "job_id": 100, - "settings": { - "tasks": [{ - "task_key": "taskkey101" - }, { - "task_key": "taskkey102" - }, { - "task_key": "taskkey103" - }, { - "task_key": "taskkey104" - }, { - "task_key": "taskkey105" - }], - "job_clusters": [cluster1.as_dict(), - cluster2.as_dict(), - cluster3.as_dict(), - cluster4.as_dict()], - "parameters": [{ - "name": "param1", - "default": "default1" - }, { - "name": "param2", - "default": "default2" - }], - "environments": [{ - "environment_key": "key1" - }, { - "environment_key": "key2" - }, { - "environment_key": "key3" - }, { - "environment_key": "key4" - }, { - "environment_key": "key5" - }] - } - }, { - "job_id": 200, - "settings": { - "tasks": [{ - "task_key": "taskkey201" - }, { - "task_key": "taskkey202" - }, { - "task_key": "taskkey203" - }, { - "task_key": "taskkey204" - }] - } - }, { - "job_id": 300, - "settings": { - "tasks": [{ - "task_key": "taskkey301" - }] - } - }, { - "job_id": 400, - "settings": { - "tasks": [{ - "task_key": "taskkey401" - }, { - "task_key": "taskkey403" - }, { - "task_key": 
"taskkey402" - }], - "job_clusters": [cluster1.as_dict(), cluster2.as_dict()] - } - }] + assert jobs_dict == [ + { + "job_id": 100, + "settings": { + "tasks": [ + {"task_key": "taskkey101"}, + {"task_key": "taskkey102"}, + {"task_key": "taskkey103"}, + {"task_key": "taskkey104"}, + {"task_key": "taskkey105"}, + ], + "job_clusters": [ + cluster1.as_dict(), + cluster2.as_dict(), + cluster3.as_dict(), + cluster4.as_dict(), + ], + "parameters": [ + {"name": "param1", "default": "default1"}, + {"name": "param2", "default": "default2"}, + ], + "environments": [ + {"environment_key": "key1"}, + {"environment_key": "key2"}, + {"environment_key": "key3"}, + {"environment_key": "key4"}, + {"environment_key": "key5"}, + ], + }, + }, + { + "job_id": 200, + "settings": { + "tasks": [ + {"task_key": "taskkey201"}, + {"task_key": "taskkey202"}, + {"task_key": "taskkey203"}, + {"task_key": "taskkey204"}, + ] + }, + }, + {"job_id": 300, "settings": {"tasks": [{"task_key": "taskkey301"}]}}, + { + "job_id": 400, + "settings": { + "tasks": [ + {"task_key": "taskkey401"}, + {"task_key": "taskkey403"}, + {"task_key": "taskkey402"}, + ], + "job_clusters": [cluster1.as_dict(), cluster2.as_dict()], + }, + }, + ] # only two requests should be made which are jobs/list requests assert requests_mock.call_count == 9 # check that job_id 300 was never used in jobs/get call history = requests_mock.request_history - assert all('300' not in request.qs.get("job_id", ['']) for request in history) + assert all("300" not in request.qs.get("job_id", [""]) for request in history) def test_list_runs_without_task_expansion(config, requests_mock): listruns_page1 = { - "runs": [{ - "run_id": 100, - "run_name": "run100", - }, { - "run_id": - 200, - "run_name": - "run200", - "job_parameters": [{ - "name": "param1", - "default": "default1" - }, { - "name": "param2", - "default": "default2" - }] - }, { - "run_id": 300, - "run_name": "run300", - }], - "next_page_token": - "tokenToSecondPage" + "runs": [ + { + "run_id": 100, + "run_name": "run100", + }, + { + "run_id": 200, + "run_name": "run200", + "job_parameters": [ + {"name": "param1", "default": "default1"}, + {"name": "param2", "default": "default2"}, + ], + }, + { + "run_id": 300, + "run_name": "run300", + }, + ], + "next_page_token": "tokenToSecondPage", } listruns_page2 = { - "runs": [{ - "run_id": 400, - "run_name": "run400", - "repair_history": [{ - "id": "repair400_1", - }, { - "id": "repair400_2", - }] - }] + "runs": [ + { + "run_id": 400, + "run_name": "run400", + "repair_history": [ + { + "id": "repair400_1", + }, + { + "id": "repair400_2", + }, + ], + } + ] } - requests_mock.get(make_listruns_path_pattern("initialToken"), text=json.dumps(listruns_page1)) - requests_mock.get(make_listruns_path_pattern("tokenToSecondPage"), text=json.dumps(listruns_page2)) + requests_mock.get( + make_listruns_path_pattern("initialToken"), + text=json.dumps(listruns_page1), + ) + requests_mock.get( + make_listruns_path_pattern("tokenToSecondPage"), + text=json.dumps(listruns_page2), + ) w = WorkspaceClient(config=config) runs_list = list(w.jobs.list_runs(expand_tasks=False, page_token="initialToken")) runs_dict = [run.as_dict() for run in runs_list] - assert runs_dict == [{ - "run_id": 100, - "run_name": "run100", - }, { - "run_id": - 200, - "run_name": - "run200", - "job_parameters": [{ - "name": "param1", - "default": "default1" - }, { - "name": "param2", - "default": "default2" - }] - }, { - "run_id": 300, - "run_name": "run300", - }, { - "run_id": 400, - "run_name": "run400", - 
"repair_history": [{ - "id": "repair400_1", - }, { - "id": "repair400_2", - }] - }] + assert runs_dict == [ + { + "run_id": 100, + "run_name": "run100", + }, + { + "run_id": 200, + "run_name": "run200", + "job_parameters": [ + {"name": "param1", "default": "default1"}, + {"name": "param2", "default": "default2"}, + ], + }, + { + "run_id": 300, + "run_name": "run300", + }, + { + "run_id": 400, + "run_name": "run400", + "repair_history": [ + { + "id": "repair400_1", + }, + { + "id": "repair400_2", + }, + ], + }, + ] # only two requests should be made which are jobs/list requests assert requests_mock.call_count == 2 @@ -706,111 +669,125 @@ def test_list_runs_without_task_expansion(config, requests_mock): def test_list_runs(config, requests_mock): listruns_page1 = { - "runs": [{ - "run_id": 100, - "tasks": [{ - "task_key": "taskkey101" - }, { - "task_key": "taskkey102" - }], - "has_more": True - }, { - "run_id": 200, - "tasks": [{ - "task_key": "taskkey201" - }] - }, { - "run_id": 300, - "tasks": [{ - "task_key": "taskkey301" - }] - }], - "next_page_token": - "tokenToSecondPage" + "runs": [ + { + "run_id": 100, + "tasks": [ + {"task_key": "taskkey101"}, + {"task_key": "taskkey102"}, + ], + "has_more": True, + }, + {"run_id": 200, "tasks": [{"task_key": "taskkey201"}]}, + {"run_id": 300, "tasks": [{"task_key": "taskkey301"}]}, + ], + "next_page_token": "tokenToSecondPage", } listruns_page2 = { - "runs": [{ - "run_id": 400, - "tasks": [{ - "task_key": "taskkey401" - }, { - "task_key": "taskkey402" - }], - "has_more": True - }] + "runs": [ + { + "run_id": 400, + "tasks": [ + {"task_key": "taskkey401"}, + {"task_key": "taskkey402"}, + ], + "has_more": True, + } + ] } getrun_100_page1 = { "run_id": 100, - "tasks": [{ - "task_key": "taskkey101" - }, { - "task_key": "taskkey102" - }], - "next_page_token": "tokenToSecondPage_100" + "tasks": [{"task_key": "taskkey101"}, {"task_key": "taskkey102"}], + "next_page_token": "tokenToSecondPage_100", } getrun_100_page2 = {"run_id": 100, "tasks": [{"task_key": "taskkey103"}]} getrun_400_page1 = { "run_id": 400, - "tasks": [{ - "task_key": "taskkey401" - }, { - "task_key": "taskkey403" - }], - "next_page_token": "tokenToSecondPage_400" + "tasks": [{"task_key": "taskkey401"}, {"task_key": "taskkey403"}], + "next_page_token": "tokenToSecondPage_400", + } + getrun_400_page2 = { + "run_id": 400, + "tasks": [{"task_key": "taskkey402"}, {"task_key": "taskkey404"}], } - getrun_400_page2 = {"run_id": 400, "tasks": [{"task_key": "taskkey402"}, {"task_key": "taskkey404"}]} - requests_mock.get(make_listruns_path_pattern("initialToken"), text=json.dumps(listruns_page1)) - requests_mock.get(make_listruns_path_pattern("tokenToSecondPage"), text=json.dumps(listruns_page2)) + requests_mock.get( + make_listruns_path_pattern("initialToken"), + text=json.dumps(listruns_page1), + ) + requests_mock.get( + make_listruns_path_pattern("tokenToSecondPage"), + text=json.dumps(listruns_page2), + ) requests_mock.get(make_getrun_path_pattern(100), text=json.dumps(getrun_100_page1)) - requests_mock.get(make_getrun_path_pattern(100, "tokenToSecondPage_100"), - text=json.dumps(getrun_100_page2)) + requests_mock.get( + make_getrun_path_pattern(100, "tokenToSecondPage_100"), + text=json.dumps(getrun_100_page2), + ) requests_mock.get(make_getrun_path_pattern(400), text=json.dumps(getrun_400_page1)) - requests_mock.get(make_getrun_path_pattern(400, "tokenToSecondPage_400"), - text=json.dumps(getrun_400_page2)) + requests_mock.get( + make_getrun_path_pattern(400, "tokenToSecondPage_400"), + 
text=json.dumps(getrun_400_page2), + ) w = WorkspaceClient(config=config) runs_list = list(w.jobs.list_runs(expand_tasks=True, page_token="initialToken")) runs_dict = [run.as_dict() for run in runs_list] - assert runs_dict == [{ - "run_id": - 100, - "tasks": [{ - "task_key": "taskkey101", - }, { - "task_key": "taskkey102", - }, { - "task_key": "taskkey103", - }], - }, { - "run_id": 200, - "tasks": [{ - "task_key": "taskkey201", - }], - }, { - "run_id": 300, - "tasks": [{ - "task_key": "taskkey301", - }], - }, { - "run_id": - 400, - "tasks": [{ - "task_key": "taskkey401", - }, { - "task_key": "taskkey403", - }, { - "task_key": "taskkey402", - }, { - "task_key": "taskkey404", - }], - }] + assert runs_dict == [ + { + "run_id": 100, + "tasks": [ + { + "task_key": "taskkey101", + }, + { + "task_key": "taskkey102", + }, + { + "task_key": "taskkey103", + }, + ], + }, + { + "run_id": 200, + "tasks": [ + { + "task_key": "taskkey201", + } + ], + }, + { + "run_id": 300, + "tasks": [ + { + "task_key": "taskkey301", + } + ], + }, + { + "run_id": 400, + "tasks": [ + { + "task_key": "taskkey401", + }, + { + "task_key": "taskkey403", + }, + { + "task_key": "taskkey402", + }, + { + "task_key": "taskkey404", + }, + ], + }, + ] # check that job_id 200 and 300 was never used in runs/get call history = requests_mock.request_history - assert all('300' not in request.qs.get("run_id", ['']) for request in history) - assert all('200' not in request.qs.get("run_id", ['']) for request in history) + assert all("300" not in request.qs.get("run_id", [""]) for request in history) + assert all("200" not in request.qs.get("run_id", [""]) for request in history) diff --git a/tests/test_metadata_service_auth.py b/tests/test_metadata_service_auth.py index f2c052006..e293a0b4b 100644 --- a/tests/test_metadata_service_auth.py +++ b/tests/test_metadata_service_auth.py @@ -12,9 +12,11 @@ def get_test_server(host: str, token: str, expires_after: int): def inner(*args, **kwargs): nonlocal counter - headers = kwargs['headers'] - if headers.get(MetadataServiceTokenSource.METADATA_SERVICE_VERSION_HEADER - ) != MetadataServiceTokenSource.METADATA_SERVICE_VERSION: + headers = kwargs["headers"] + if ( + headers.get(MetadataServiceTokenSource.METADATA_SERVICE_VERSION_HEADER) + != MetadataServiceTokenSource.METADATA_SERVICE_VERSION + ): resp = requests.Response() resp.status_code = 400 return resp @@ -26,11 +28,11 @@ def inner(*args, **kwargs): json_data = { "access_token": f"{token}-{counter}", "expires_on": int((datetime.now() + timedelta(seconds=expires_after)).timestamp()), - "token_type": "Bearer" + "token_type": "Bearer", } resp = requests.Response() resp.status_code = 200 - resp._content = json.dumps(json_data).encode('utf-8') + resp._content = json.dumps(json_data).encode("utf-8") counter += 1 return resp @@ -38,39 +40,39 @@ def inner(*args, **kwargs): def test_config_metadata_service(monkeypatch): - monkeypatch.setattr(requests, 'get', get_test_server('https://x', 'token', 100)) - monkeypatch.setenv('DATABRICKS_HOST', 'x') - monkeypatch.setenv('DATABRICKS_METADATA_SERVICE_URL', 'http://y') + monkeypatch.setattr(requests, "get", get_test_server("https://x", "token", 100)) + monkeypatch.setenv("DATABRICKS_HOST", "x") + monkeypatch.setenv("DATABRICKS_METADATA_SERVICE_URL", "http://y") cfg = Config() - assert cfg.auth_type == 'metadata-service' - assert cfg.host == 'https://x' - assert cfg.metadata_service_url == 'http://y' + assert cfg.auth_type == "metadata-service" + assert cfg.host == "https://x" + assert cfg.metadata_service_url 
== "http://y" def test_config_metadata_service_athenticate(monkeypatch): - monkeypatch.setattr(requests, 'get', get_test_server('https://x', 'token', 1000)) - monkeypatch.setenv('DATABRICKS_HOST', 'x') - monkeypatch.setenv('DATABRICKS_METADATA_SERVICE_URL', 'http://y') + monkeypatch.setattr(requests, "get", get_test_server("https://x", "token", 1000)) + monkeypatch.setenv("DATABRICKS_HOST", "x") + monkeypatch.setenv("DATABRICKS_METADATA_SERVICE_URL", "http://y") cfg = Config() - assert cfg.auth_type == 'metadata-service' - assert cfg.host == 'https://x' - assert cfg.metadata_service_url == 'http://y' + assert cfg.auth_type == "metadata-service" + assert cfg.host == "https://x" + assert cfg.metadata_service_url == "http://y" headers = cfg.authenticate() assert headers.get("Authorization") == "Bearer token-0" def test_config_metadata_service_refresh(monkeypatch): - monkeypatch.setattr(requests, 'get', get_test_server('https://x', 'token', 10)) - monkeypatch.setenv('DATABRICKS_HOST', 'x') - monkeypatch.setenv('DATABRICKS_METADATA_SERVICE_URL', 'http://y') + monkeypatch.setattr(requests, "get", get_test_server("https://x", "token", 10)) + monkeypatch.setenv("DATABRICKS_HOST", "x") + monkeypatch.setenv("DATABRICKS_METADATA_SERVICE_URL", "http://y") cfg = Config() - assert cfg.auth_type == 'metadata-service' - assert cfg.host == 'https://x' - assert cfg.metadata_service_url == 'http://y' + assert cfg.auth_type == "metadata-service" + assert cfg.host == "https://x" + assert cfg.metadata_service_url == "http://y" headers = cfg.authenticate() # the first refresh happens when initialising config. So this is the second refresh diff --git a/tests/test_misc.py b/tests/test_misc.py index 2b46de6f1..e614da32b 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -6,9 +6,15 @@ def test_issue_135(): from databricks.sdk.service.compute import Library, PythonPyPiLibrary from databricks.sdk.service.jobs import Task - jts = Task(libraries=[Library(pypi=PythonPyPiLibrary(package='databricks-sdk'))], task_key='abc') - - assert jts.as_dict() == {'task_key': 'abc', 'libraries': [{'pypi': {'package': 'databricks-sdk'}}]} + jts = Task( + libraries=[Library(pypi=PythonPyPiLibrary(package="databricks-sdk"))], + task_key="abc", + ) + + assert jts.as_dict() == { + "task_key": "abc", + "libraries": [{"pypi": {"package": "databricks-sdk"}}], + } # https://github.com/databricks/databricks-sdk-py/issues/103 @@ -16,30 +22,31 @@ def test_issue_103(): from databricks.sdk.service.compute import ClusterSpec from databricks.sdk.service.jobs import JobCluster - jc = JobCluster(job_cluster_key="no_worker", - new_cluster=ClusterSpec(spark_version="11.3.x-scala2.12", - custom_tags={"ResourceClass": "SingleNode"}, - num_workers=0, - node_type_id="Standard_DS3_v2", - ), - ) + jc = JobCluster( + job_cluster_key="no_worker", + new_cluster=ClusterSpec( + spark_version="11.3.x-scala2.12", + custom_tags={"ResourceClass": "SingleNode"}, + num_workers=0, + node_type_id="Standard_DS3_v2", + ), + ) assert jc.as_dict() == { - 'job_cluster_key': 'no_worker', - 'new_cluster': { - 'custom_tags': { - 'ResourceClass': 'SingleNode' - }, - 'num_workers': 0, - 'node_type_id': 'Standard_DS3_v2', - 'spark_version': '11.3.x-scala2.12' - } + "job_cluster_key": "no_worker", + "new_cluster": { + "custom_tags": {"ResourceClass": "SingleNode"}, + "num_workers": 0, + "node_type_id": "Standard_DS3_v2", + "spark_version": "11.3.x-scala2.12", + }, } def test_serde_with_empty_dataclass(): - inst = catalog.OnlineTableSpec(pipeline_id="123", - 
run_continuously=catalog.OnlineTableSpecContinuousSchedulingPolicy(), - ) - assert inst.as_dict() == {'pipeline_id': '123', 'run_continuously': {}} + inst = catalog.OnlineTableSpec( + pipeline_id="123", + run_continuously=catalog.OnlineTableSpecContinuousSchedulingPolicy(), + ) + assert inst.as_dict() == {"pipeline_id": "123", "run_continuously": {}} assert inst == catalog.OnlineTableSpec.from_dict(inst.as_dict()) diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py index 49aed33a5..d22217f7f 100644 --- a/tests/test_model_serving_auth.py +++ b/tests/test_model_serving_auth.py @@ -8,119 +8,166 @@ from .conftest import raises -default_auth_base_error_message = \ - "default auth: cannot configure default credentials, " \ - "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication " \ +default_auth_base_error_message = ( + "default auth: cannot configure default credentials, " + "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication " "to configure credentials for your preferred authentication method" +) -@pytest.mark.parametrize("env_values, del_env_values, oauth_file_name", - [([ - ('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DB_MODEL_SERVING_HOST_URL', 'x') - ], ['DATABRICKS_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), - ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'), - ('DB_MODEL_SERVING_HOST_URL', 'x')], ['DATABRICKS_MODEL_SERVING_HOST_URL'], - "tests/testdata/model-serving-test-token"), - ([('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x') - ], ['DB_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), - ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'), - ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x') - ], ['DB_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), ]) +@pytest.mark.parametrize( + "env_values, del_env_values, oauth_file_name", + [ + ( + [ + ("IS_IN_DB_MODEL_SERVING_ENV", "true"), + ("DB_MODEL_SERVING_HOST_URL", "x"), + ], + ["DATABRICKS_MODEL_SERVING_HOST_URL"], + "tests/testdata/model-serving-test-token", + ), + ( + [ + ("IS_IN_DATABRICKS_MODEL_SERVING_ENV", "true"), + ("DB_MODEL_SERVING_HOST_URL", "x"), + ], + ["DATABRICKS_MODEL_SERVING_HOST_URL"], + "tests/testdata/model-serving-test-token", + ), + ( + [ + ("IS_IN_DB_MODEL_SERVING_ENV", "true"), + ("DATABRICKS_MODEL_SERVING_HOST_URL", "x"), + ], + ["DB_MODEL_SERVING_HOST_URL"], + "tests/testdata/model-serving-test-token", + ), + ( + [ + ("IS_IN_DATABRICKS_MODEL_SERVING_ENV", "true"), + ("DATABRICKS_MODEL_SERVING_HOST_URL", "x"), + ], + ["DB_MODEL_SERVING_HOST_URL"], + "tests/testdata/model-serving-test-token", + ), + ], +) def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeypatch, mocker): ## In mlflow we check for these two environment variables to return the correct config - for (env_name, env_value) in env_values: + for env_name, env_value in env_values: monkeypatch.setenv(env_name, env_value) - for (env_name) in del_env_values: + for env_name in del_env_values: monkeypatch.delenv(env_name, raising=False) # patch mlflow to read the file from the test directory monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", - oauth_file_name) - mocker.patch('databricks.sdk.config.Config._known_file_config_loader') + oauth_file_name, + ) + mocker.patch("databricks.sdk.config.Config._known_file_config_loader") cfg = Config() 
- assert cfg.auth_type == 'model-serving' + assert cfg.auth_type == "model-serving" headers = cfg.authenticate() - assert (cfg.host == 'x') + assert cfg.host == "x" # Token defined in the test file - assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token' + assert headers.get("Authorization") == "Bearer databricks_sdk_unit_test_token" @pytest.mark.parametrize( "env_values, oauth_file_name", [ - ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name - ([('IS_IN_DB_MODEL_SERVING_ENV', 'true') - ], "invalid_file_name"), # In Model Serving and Invalid File Name - ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true') - ], "invalid_file_name"), # In Model Serving and Invalid File Name - ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name - ]) + ( + [], + "invalid_file_name", + ), # Not in Model Serving and Invalid File Name + ( + [("IS_IN_DB_MODEL_SERVING_ENV", "true")], + "invalid_file_name", + ), # In Model Serving and Invalid File Name + ( + [("IS_IN_DATABRICKS_MODEL_SERVING_ENV", "true")], + "invalid_file_name", + ), # In Model Serving and Invalid File Name + ( + [], + "tests/testdata/model-serving-test-token", + ), # Not in Model Serving and Valid File Name + ], +) @raises(default_auth_base_error_message) def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch): # Guarantee that the tests defaults to env variables rather than config file. # # TODO: this is hacky and we should find a better way to tell the config # that it should not read from the config file. - monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x') + monkeypatch.setenv("DATABRICKS_CONFIG_FILE", "x") - for (env_name, env_value) in env_values: + for env_name, env_value in env_values: monkeypatch.setenv(env_name, env_value) monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", - oauth_file_name) + oauth_file_name, + ) Config() def test_model_serving_auth_refresh(monkeypatch, mocker): ## In mlflow we check for these two environment variables to return the correct config - monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true') - monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x') + monkeypatch.setenv("IS_IN_DB_MODEL_SERVING_ENV", "true") + monkeypatch.setenv("DB_MODEL_SERVING_HOST_URL", "x") # patch mlflow to read the file from the test directory monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", - "tests/testdata/model-serving-test-token") - mocker.patch('databricks.sdk.config.Config._known_file_config_loader') + "tests/testdata/model-serving-test-token", + ) + mocker.patch("databricks.sdk.config.Config._known_file_config_loader") cfg = Config() - assert cfg.auth_type == 'model-serving' + assert cfg.auth_type == "model-serving" current_time = time.time() headers = cfg.authenticate() - assert (cfg.host == 'x') - assert headers.get( - "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file + assert cfg.host == "x" + assert headers.get("Authorization") == "Bearer databricks_sdk_unit_test_token" # Token defined in the test file # Simulate refreshing the token by patching to to a new file monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", - "tests/testdata/model-serving-test-token-v2") + "tests/testdata/model-serving-test-token-v2", + ) - 
monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 10) + monkeypatch.setattr( + "databricks.sdk.credentials_provider.time.time", + lambda: current_time + 10, + ) headers = cfg.authenticate() - assert (cfg.host == 'x') + assert cfg.host == "x" # Read from cache even though new path is set because expiry is still not hit - assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token' + assert headers.get("Authorization") == "Bearer databricks_sdk_unit_test_token" # Expiry is 300 seconds so this should force an expiry and re read from the new file path - monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 600) + monkeypatch.setattr( + "databricks.sdk.credentials_provider.time.time", + lambda: current_time + 600, + ) headers = cfg.authenticate() - assert (cfg.host == 'x') + assert cfg.host == "x" # Read V2 now - assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2' + assert headers.get("Authorization") == "Bearer databricks_sdk_unit_test_token_v2" def test_agent_user_credentials(monkeypatch, mocker): - monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true') - monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x') + monkeypatch.setenv("IS_IN_DB_MODEL_SERVING_ENV", "true") + monkeypatch.setenv("DB_MODEL_SERVING_HOST_URL", "x") monkeypatch.setattr( "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH", - "tests/testdata/model-serving-test-token") + "tests/testdata/model-serving-test-token", + ) invokers_token_val = "databricks_invokers_token" current_thread = threading.current_thread() @@ -128,12 +175,12 @@ def test_agent_user_credentials(monkeypatch, mocker): thread_data["invokers_token"] = invokers_token_val cfg = Config(credentials_strategy=ModelServingUserCredentials()) - assert cfg.auth_type == 'model_serving_user_credentials' + assert cfg.auth_type == "model_serving_user_credentials" headers = cfg.authenticate() - assert (cfg.host == 'x') - assert headers.get("Authorization") == f'Bearer {invokers_token_val}' + assert (cfg.host == "x") + assert headers.get("Authorization") == f"Bearer {invokers_token_val}" # Test updates of invokers token invokers_token_val = "databricks_invokers_token_v2" @@ -142,20 +189,20 @@ def test_agent_user_credentials(monkeypatch, mocker): thread_data["invokers_token"] = invokers_token_val headers = cfg.authenticate() - assert (cfg.host == 'x') - assert headers.get("Authorization") == f'Bearer {invokers_token_val}' + assert cfg.host == "x" + assert headers.get("Authorization") == f"Bearer {invokers_token_val}" # If this credential strategy is being used in a non model serving environments then use default credential strategy instead def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch): - monkeypatch.setenv('DATABRICKS_HOST', 'x') - monkeypatch.setenv('DATABRICKS_TOKEN', 'token') + monkeypatch.setenv("DATABRICKS_HOST", "x") + monkeypatch.setenv("DATABRICKS_TOKEN", "token") cfg = Config(credentials_strategy=ModelServingUserCredentials()) - assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment + assert cfg.auth_type == "pat" # Auth type is PAT as it is no longer in a model serving environment headers = cfg.authenticate() - assert (cfg.host == 'https://x') - assert headers.get("Authorization") == f'Bearer token' + assert cfg.host == "https://x" + assert headers.get("Authorization") == f"Bearer token" diff --git 
a/tests/test_oauth.py b/tests/test_oauth.py index a637a5508..56f7b29a4 100644 --- a/tests/test_oauth.py +++ b/tests/test_oauth.py @@ -7,44 +7,50 @@ def test_token_cache_unique_filename_by_host(): - common_args = dict(client_id="abc", - redirect_url="http://localhost:8020", - oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - assert TokenCache(host="http://localhost:", - **common_args).filename != TokenCache("https://bar.cloud.databricks.com", - **common_args).filename + common_args = dict( + client_id="abc", + redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"), + ) + assert ( + TokenCache(host="http://localhost:", **common_args).filename + != TokenCache("https://bar.cloud.databricks.com", **common_args).filename + ) def test_token_cache_unique_filename_by_client_id(): - common_args = dict(host="http://localhost:", - redirect_url="http://localhost:8020", - oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", - **common_args).filename + common_args = dict( + host="http://localhost:", + redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"), + ) + assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", **common_args).filename def test_token_cache_unique_filename_by_scopes(): - common_args = dict(host="http://localhost:", - client_id="abc", - redirect_url="http://localhost:8020", - oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234")) - assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], - **common_args).filename + common_args = dict( + host="http://localhost:", + client_id="abc", + redirect_url="http://localhost:8020", + oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"), + ) + assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], **common_args).filename def test_account_oidc_endpoints(requests_mock): requests_mock.get( "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", json={ - "authorization_endpoint": - "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", - "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token" - }) + "authorization_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token", + }, + ) client = _BaseClient(clock=FakeClock()) endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client) assert endpoints == OidcEndpoints( "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", - "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token") + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token", + ) def test_account_oidc_endpoints_retry_on_429(requests_mock): @@ -67,33 +73,38 @@ def observe_request(_request): requests_mock.get( "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", additional_matcher=nth_request(0), - status_code=429) + status_code=429, + ) requests_mock.get( "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server", 
additional_matcher=nth_request(1), json={ - "authorization_endpoint": - "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", - "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token" - }) + "authorization_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", + "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token", + }, + ) client = _BaseClient(clock=FakeClock()) endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client) assert endpoints == OidcEndpoints( "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize", - "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token") + "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token", + ) def test_workspace_oidc_endpoints(requests_mock): - requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", - json={ - "authorization_endpoint": - "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", - "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token" - }) + requests_mock.get( + "https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + json={ + "authorization_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token", + }, + ) client = _BaseClient(clock=FakeClock()) endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client) - assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", - "https://my-workspace.cloud.databricks.com/oidc/oauth/token") + assert endpoints == OidcEndpoints( + "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "https://my-workspace.cloud.databricks.com/oidc/oauth/token", + ) def test_workspace_oidc_endpoints_retry_on_429(requests_mock): @@ -110,17 +121,22 @@ def observe_request(_request): return observe_request - requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", - additional_matcher=nth_request(0), - status_code=429) - requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", - additional_matcher=nth_request(1), - json={ - "authorization_endpoint": - "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", - "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token" - }) + requests_mock.get( + "https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + additional_matcher=nth_request(0), + status_code=429, + ) + requests_mock.get( + "https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server", + additional_matcher=nth_request(1), + json={ + "authorization_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token", + }, + ) client = _BaseClient(clock=FakeClock()) endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client) - assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", - "https://my-workspace.cloud.databricks.com/oidc/oauth/token") + assert endpoints == OidcEndpoints( + "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize", + 
"https://my-workspace.cloud.databricks.com/oidc/oauth/token", + ) diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index e503da073..5c17e48f7 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -10,8 +10,8 @@ def test_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient - monkeypatch.setenv('DATABRICKS_HOST', 'test_host') - monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + monkeypatch.setenv("DATABRICKS_HOST", "test_host") + monkeypatch.setenv("DATABRICKS_TOKEN", "test_token") w = WorkspaceClient(config=Config()) client = w.serving_endpoints.get_open_ai_client() @@ -23,8 +23,8 @@ def test_open_ai_client(monkeypatch): def test_langchain_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient - monkeypatch.setenv('DATABRICKS_HOST', 'test_host') - monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + monkeypatch.setenv("DATABRICKS_HOST", "test_host") + monkeypatch.setenv("DATABRICKS_TOKEN", "test_token") w = WorkspaceClient(config=Config()) client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") @@ -33,19 +33,25 @@ def test_langchain_open_ai_client(monkeypatch): def test_http_request(w, requests_mock): - headers = {"Accept": "text/plain", "Content-Type": "application/json", } + headers = { + "Accept": "text/plain", + "Content-Type": "application/json", + } mocked_url = "http://localhost/api/2.0/external-function" blob_response = BytesIO(b"The request was successful") - requests_mock.post(mocked_url, - request_headers=headers, - content=blob_response.getvalue(), - status_code=200, - ) - response = w.serving_endpoints.http_request(conn="test_conn", - method=ExternalFunctionRequestHttpMethod.GET, - path="test_path") + requests_mock.post( + mocked_url, + request_headers=headers, + content=blob_response.getvalue(), + status_code=200, + ) + response = w.serving_endpoints.http_request( + conn="test_conn", + method=ExternalFunctionRequestHttpMethod.GET, + path="test_path", + ) assert requests_mock.call_count == 1 assert requests_mock.called - assert response.status_code == 200 # Verify the response status - assert (response.text == "The request was successful") # Ensure the response body matches the mocked data \ No newline at end of file + assert response.status_code == 200 # Verify the response status + assert response.text == "The request was successful" # Ensure the response body matches the mocked data diff --git a/tests/test_refreshable.py b/tests/test_refreshable.py index 7265026e8..dc3157331 100644 --- a/tests/test_refreshable.py +++ b/tests/test_refreshable.py @@ -8,11 +8,13 @@ class _MockRefreshable(Refreshable): - def __init__(self, - disable_async, - token=None, - stale_duration=timedelta(seconds=60), - refresh_effect: Callable[[], Token] = None): + def __init__( + self, + disable_async, + token=None, + stale_duration=timedelta(seconds=60), + refresh_effect: Callable[[], Token] = None, + ): super().__init__(token, disable_async, stale_duration) self._refresh_effect = refresh_effect self._refresh_count = 0 @@ -37,7 +39,9 @@ def f() -> Token: return f -def blocking_refresh(token: Token) -> (Callable[[], Token], Callable[[], None]): +def blocking_refresh( + token: Token, +) -> (Callable[[], Token], Callable[[], None]): """ Create a refresh function that blocks until unblock is called. 
@@ -63,7 +67,10 @@ def unblock(): def test_disable_async_stale_does_not_refresh(): - stale_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=50), ) + stale_token = Token( + access_token="access_token", + expiry=datetime.now() + timedelta(seconds=50), + ) r = _MockRefreshable(token=stale_token, disable_async=True, refresh_effect=fail) result = r.token() assert r._refresh_count == 0 @@ -71,7 +78,10 @@ def test_disable_async_stale_does_not_refresh(): def test_disable_async_no_token_does_refresh(): - token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=50), ) + token = Token( + access_token="access_token", + expiry=datetime.now() + timedelta(seconds=50), + ) r = _MockRefreshable(token=None, disable_async=True, refresh_effect=static_token(token)) result = r.token() assert r._refresh_count == 1 @@ -79,7 +89,9 @@ def test_disable_async_no_token_does_refresh(): def test_disable_async_no_expiration_does_not_refresh(): - non_expiring_token = Token(access_token="access_token", ) + non_expiring_token = Token( + access_token="access_token", + ) r = _MockRefreshable(token=non_expiring_token, disable_async=True, refresh_effect=fail) result = r.token() assert r._refresh_count == 0 @@ -88,7 +100,10 @@ def test_disable_async_no_expiration_does_not_refresh(): def test_disable_async_fresh_does_not_refresh(): # Create a token that is already stale. If async is disabled, the token should not be refreshed. - token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), ) + token = Token( + access_token="access_token", + expiry=datetime.now() + timedelta(seconds=300), + ) r = _MockRefreshable(token=token, disable_async=True, refresh_effect=fail) result = r.token() assert r._refresh_count == 0 @@ -96,36 +111,58 @@ def test_disable_async_fresh_does_not_refresh(): def test_disable_async_expired_does_refresh(): - expired_token = Token(access_token="access_token", expiry=datetime.now() - timedelta(seconds=300), ) - new_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), ) + expired_token = Token( + access_token="access_token", + expiry=datetime.now() - timedelta(seconds=300), + ) + new_token = Token( + access_token="access_token", + expiry=datetime.now() + timedelta(seconds=300), + ) # Add one second to the refresh time to ensure that the call is blocking. # If the call is not blocking, the wait time will ensure that the # old token is returned. - r = _MockRefreshable(token=expired_token, - disable_async=True, - refresh_effect=static_token(new_token, wait=1)) + r = _MockRefreshable( + token=expired_token, + disable_async=True, + refresh_effect=static_token(new_token, wait=1), + ) result = r.token() assert r._refresh_count == 1 assert result == new_token def test_expired_does_refresh(): - expired_token = Token(access_token="access_token", expiry=datetime.now() - timedelta(seconds=300), ) - new_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), ) + expired_token = Token( + access_token="access_token", + expiry=datetime.now() - timedelta(seconds=300), + ) + new_token = Token( + access_token="access_token", + expiry=datetime.now() + timedelta(seconds=300), + ) # Add one second to the refresh time to ensure that the call is blocking. # If the call is not blocking, the wait time will ensure that the # old token is returned. 
-    r = _MockRefreshable(token=expired_token,
-                         disable_async=False,
-                         refresh_effect=static_token(new_token, wait=1))
+    r = _MockRefreshable(
+        token=expired_token,
+        disable_async=False,
+        refresh_effect=static_token(new_token, wait=1),
+    )
     result = r.token()
     assert r._refresh_count == 1
     assert result == new_token
 
 
 def test_stale_does_refresh_async():
-    stale_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=50), )
-    new_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), )
+    stale_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=50),
+    )
+    new_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=300),
+    )
     # Add one second to the refresh to avoid race conditions.
     # Without it, the new token may be returned in some cases.
     refresh, unblock = blocking_refresh(new_token)
@@ -146,18 +183,28 @@ def test_stale_does_refresh_async():
 
 
 def test_no_token_does_refresh():
-    new_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), )
+    new_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=300),
+    )
     # Add one second to the refresh time to ensure that the call is blocking.
     # If the call is not blocking, the wait time will ensure that the
     # token is not returned.
-    r = _MockRefreshable(token=None, disable_async=False, refresh_effect=static_token(new_token, wait=1))
+    r = _MockRefreshable(
+        token=None,
+        disable_async=False,
+        refresh_effect=static_token(new_token, wait=1),
+    )
     result = r.token()
     assert r._refresh_count == 1
     assert result == new_token
 
 
 def test_fresh_does_not_refresh():
-    fresh_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), )
+    fresh_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=300),
+    )
     r = _MockRefreshable(token=fresh_token, disable_async=False, refresh_effect=fail)
     result = r.token()
     assert r._refresh_count == 0
@@ -165,8 +212,14 @@ def test_fresh_does_not_refresh():
 
 
 def test_multiple_calls_dont_start_many_threads():
-    stale_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=59), )
-    new_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=300), )
+    stale_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=59),
+    )
+    new_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=300),
+    )
     refresh, unblock = blocking_refresh(new_token)
     r = _MockRefreshable(token=stale_token, disable_async=False, refresh_effect=refresh)
     # Call twice. The second call should not start a new thread.
@@ -184,8 +237,14 @@ def test_multiple_calls_dont_start_many_threads():
 
 
 def test_async_failure_disables_async():
-    stale_token = Token(access_token="access_token", expiry=datetime.now() + timedelta(seconds=59), )
-    new_token = Token(access_token="new_token", expiry=datetime.now() + timedelta(seconds=300), )
+    stale_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() + timedelta(seconds=59),
+    )
+    new_token = Token(
+        access_token="new_token",
+        expiry=datetime.now() + timedelta(seconds=300),
+    )
     r = _MockRefreshable(token=stale_token, disable_async=False, refresh_effect=fail)
     # The call should fail and disable async refresh,
     # but the exception will be caught inside the thread.
@@ -205,7 +264,10 @@ def test_async_failure_disables_async():
     assert r._refresh_count == 0
 
     # Inject an expired token.
-    expired_token = Token(access_token="access_token", expiry=datetime.now() - timedelta(seconds=300), )
+    expired_token = Token(
+        access_token="access_token",
+        expiry=datetime.now() - timedelta(seconds=300),
+    )
     r._token = expired_token
 
     # This should be blocking and return the new token.
diff --git a/tests/test_retries.py b/tests/test_retries.py
index 65dfa4d70..2ad6e4ef6 100644
--- a/tests/test_retries.py
+++ b/tests/test_retries.py
@@ -18,7 +18,7 @@ def foo():
 def test_match_retry_condition_on_conflict():
     with pytest.raises(SyntaxError):
 
-        @retried(on=[IOError], is_retryable=lambda _: 'always', clock=FakeClock())
+        @retried(on=[IOError], is_retryable=lambda _: "always", clock=FakeClock())
         def foo():
             return 1
 
@@ -26,7 +26,11 @@ def foo():
 def test_match_retry_always():
     with pytest.raises(TimeoutError):
 
-        @retried(is_retryable=lambda _: 'always', timeout=timedelta(seconds=1), clock=FakeClock())
+        @retried(
+            is_retryable=lambda _: "always",
+            timeout=timedelta(seconds=1),
+            clock=FakeClock(),
+        )
         def foo():
             raise StopIteration()
 
@@ -36,7 +40,11 @@ def foo():
 def test_match_on_errors():
     with pytest.raises(TimeoutError):
 
-        @retried(on=[KeyError, AttributeError], timeout=timedelta(seconds=0.5), clock=FakeClock())
+        @retried(
+            on=[KeyError, AttributeError],
+            timeout=timedelta(seconds=0.5),
+            clock=FakeClock(),
+        )
         def foo():
             raise KeyError(1)
 
@@ -56,7 +64,11 @@ def foo():
 def test_propagates_outside_exception():
     with pytest.raises(KeyError):
 
-        @retried(on=[AttributeError], timeout=timedelta(seconds=0.5), clock=FakeClock())
+        @retried(
+            on=[AttributeError],
+            timeout=timedelta(seconds=0.5),
+            clock=FakeClock(),
+        )
         def foo():
             raise KeyError(1)
 
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
index ba6f694f5..c9c6889c4 100644
--- a/tests/test_user_agent.py
+++ b/tests/test_user_agent.py
@@ -8,6 +8,7 @@
 @pytest.fixture(scope="function")
 def user_agent():
     from databricks.sdk import useragent
+
     orig_product_name = useragent._product_name
     orig_product_version = useragent._product_version
     orig_extra = useragent._extra
@@ -24,24 +25,24 @@
 def test_user_agent(user_agent):
     user_agent._reset_product()
     default = user_agent.to_string()
-    assert 'unknown/0.0.0' in default
-    assert 'databricks-sdk-py/' + __version__ in default
-    assert 'os/' in default
-    assert 'python/' in default
+    assert "unknown/0.0.0" in default
+    assert "databricks-sdk-py/" + __version__ in default
+    assert "os/" in default
+    assert "python/" in default
 
 
 @pytest.mark.xdist_group(name="user_agent")
 def test_user_agent_with_product(user_agent):
-    user_agent.with_product('test', '1.0.0')
-    assert 'test/1.0.0' in user_agent.to_string()
+    user_agent.with_product("test", "1.0.0")
+    assert "test/1.0.0" in user_agent.to_string()
 
 
 @pytest.mark.xdist_group(name="user_agent")
 def test_user_agent_with_partner(user_agent):
-    user_agent.with_partner('test')
-    user_agent.with_partner('differenttest')
-    assert 'partner/test' in user_agent.to_string()
-    assert 'partner/differenttest' in user_agent.to_string()
+    user_agent.with_partner("test")
+    user_agent.with_partner("differenttest")
+    assert "partner/test" in user_agent.to_string()
+    assert "partner/differenttest" in user_agent.to_string()
 
 
 @pytest.fixture(scope="function")
@@ -52,6 +53,7 @@ def clear_cicd():
     # Clear cached CICD provider.
     from databricks.sdk import useragent
+
     useragent._cicd_provider = None
 
     yield
@@ -62,25 +64,28 @@ def clear_cicd():
 
 
 def test_user_agent_cicd_no_provider(clear_cicd):
     from databricks.sdk import useragent
+
     user_agent = useragent.to_string()
-    assert 'cicd' not in user_agent
+    assert "cicd" not in user_agent
 
 
 def test_user_agent_cicd_one_provider(clear_cicd):
-    os.environ['GITHUB_ACTIONS'] = 'true'
+    os.environ["GITHUB_ACTIONS"] = "true"
     from databricks.sdk import useragent
+
     user_agent = useragent.to_string()
-    assert 'cicd/github' in user_agent
+    assert "cicd/github" in user_agent
 
 
 def test_user_agent_cicd_two_provider(clear_cicd):
-    os.environ['GITHUB_ACTIONS'] = 'true'
-    os.environ['GITLAB_CI'] = 'true'
+    os.environ["GITHUB_ACTIONS"] = "true"
+    os.environ["GITLAB_CI"] = "true"
     from databricks.sdk import useragent
+
     user_agent = useragent.to_string()
-    assert 'cicd/github' in user_agent
+    assert "cicd/github" in user_agent
diff --git a/tests/testdata/test_casing.py b/tests/testdata/test_casing.py
index 08162dcb8..ef6257034 100644
--- a/tests/testdata/test_casing.py
+++ b/tests/testdata/test_casing.py
@@ -3,8 +3,18 @@
 from databricks.sdk.casing import Casing
 
 
-@pytest.mark.parametrize('name, expected', [('', ''), ('a', 'A'), ('abc', 'Abc'), ('Abc', 'Abc'),
-                                            ('abc_def', 'Abc-Def'), ('abc-def', 'Abc-Def'),
-                                            ('abcDef', 'Abc-Def'), ('AbcDef', 'Abc-Def'), ])
+@pytest.mark.parametrize(
+    "name, expected",
+    [
+        ("", ""),
+        ("a", "A"),
+        ("abc", "Abc"),
+        ("Abc", "Abc"),
+        ("abc_def", "Abc-Def"),
+        ("abc-def", "Abc-Def"),
+        ("abcDef", "Abc-Def"),
+        ("AbcDef", "Abc-Def"),
+    ],
+)
 def test_to_header_case(name, expected):
     assert Casing.to_header_case(name) == expected

From 4915b35080757f44a582a6a3967d8fd763ba13cb Mon Sep 17 00:00:00 2001
From: Renaud Hartert
Date: Wed, 26 Feb 2025 18:22:33 +0100
Subject: [PATCH 2/3] unpin black

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index d2236d499..82c6be56a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,7 +38,7 @@ dev = [
     "pytest-cov",
     "pytest-xdist",
     "pytest-mock",
-    "black==23.10.0",
+    "black",
     "pycodestyle",
     "autoflake",
     "isort",

From 0aab32ea73d42590ff8438a294d600275e2a6cba Mon Sep 17 00:00:00 2001
From: Renaud Hartert
Date: Wed, 26 Feb 2025 18:25:00 +0100
Subject: [PATCH 3/3] make fmt

---
 tests/test_model_serving_auth.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index d22217f7f..4f0156f0f 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -179,7 +179,7 @@ def test_agent_user_credentials(monkeypatch, mocker):
 
     headers = cfg.authenticate()
 
-    assert (cfg.host == "x")
+    assert cfg.host == "x"
     assert headers.get("Authorization") == f"Bearer {invokers_token_val}"
 
     # Test updates of invokers token